From 6a53ef383b7748d91f0818b88a5773e07097463a Mon Sep 17 00:00:00 2001
From: Dominik Riemer
Date: Mon, 8 Jul 2024 19:01:20 +0200
Subject: [PATCH 1/3] Update StreamPipes version in docs

---
 docs/01_try-installation.md       | 2 +-
 website-v2/docusaurus.config.js   | 2 +-
 website-v2/src/pages/download.tsx | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/01_try-installation.md b/docs/01_try-installation.md
index d856083f8..ea6b45d79 100644
--- a/docs/01_try-installation.md
+++ b/docs/01_try-installation.md
@@ -33,7 +33,7 @@ best experience), Firefox or Edge.

 ## Install StreamPipes

-
+

 ## Setup StreamPipes

diff --git a/website-v2/docusaurus.config.js b/website-v2/docusaurus.config.js
index 8c3edd9b9..8f181b3cd 100644
--- a/website-v2/docusaurus.config.js
+++ b/website-v2/docusaurus.config.js
@@ -100,7 +100,7 @@ module.exports = {
       textColor: 'white',
       isCloseable: false,
       content:
-        'Apache StreamPipes 0.95.0 is available! ⭐️',
+        'Apache StreamPipes 0.95.1 is available! ⭐️',
     },
   "image": "img/favicon.png",
   "footer": {
diff --git a/website-v2/src/pages/download.tsx b/website-v2/src/pages/download.tsx
index a80a71f7e..6c8eccfb6 100644
--- a/website-v2/src/pages/download.tsx
+++ b/website-v2/src/pages/download.tsx
@@ -29,7 +29,7 @@ const Downloads: FC = () => (
 Installation
From c8be3c0a25c3d98aaab0506635ff5aed19ea44a2 Mon Sep 17 00:00:00 2001 From: Dominik Riemer Date: Mon, 8 Jul 2024 19:04:23 +0200 Subject: [PATCH 2/3] Release StreamPipes docs 0.95.1 --- .../version-0.95.1/01_try-installation.md | 62 ++ .../version-0.95.1/01_try-overview.md | 134 ++++ .../version-0.95.1/01_try-tutorial.md | 20 + .../version-0.95.1/02_concepts-adapter.md | 7 + .../02_concepts-data-streams.md | 7 + .../version-0.95.1/02_concepts-glossary.md | 7 + .../version-0.95.1/02_concepts-overview.md | 46 ++ .../version-0.95.1/02_concepts-pipeline.md | 7 + .../version-0.95.1/02_introduction.md | 85 +++ .../version-0.95.1/03_use-configurations.md | 48 ++ .../version-0.95.1/03_use-connect.md | 72 +++ .../version-0.95.1/03_use-dashboard.md | 66 ++ .../version-0.95.1/03_use-data-explorer.md | 102 +++ .../03_use-install-pipeline-elements.md | 9 + .../03_use-managing-pipelines.md | 52 ++ .../version-0.95.1/03_use-notifications.md | 25 + .../version-0.95.1/03_use-pipeline-editor.md | 62 ++ .../05_deploy-choosing-the-right-flavor.md | 47 ++ .../version-0.95.1/05_deploy-docker.md | 104 +++ .../05_deploy-environment-variables.md | 88 +++ .../version-0.95.1/05_deploy-kubernetes.md | 269 ++++++++ .../version-0.95.1/05_deploy-security.md | 75 +++ .../version-0.95.1/05_deploy-use-ssl.md | 35 + .../version-0.95.1/06_extend-archetypes.md | 46 ++ .../version-0.95.1/06_extend-cli.md | 190 ++++++ .../version-0.95.1/06_extend-client.md | 204 ++++++ .../version-0.95.1/06_extend-customize-ui.md | 226 +++++++ .../06_extend-first-processor.md | 54 ++ .../06_extend-sdk-event-model.md | 141 ++++ .../version-0.95.1/06_extend-sdk-functions.md | 127 ++++ .../06_extend-sdk-migration-sd.md | 117 ++++ .../06_extend-sdk-migrations.md | 179 +++++ .../06_extend-sdk-output-strategies.md | 348 ++++++++++ .../06_extend-sdk-static-properties.md | 267 ++++++++ .../06_extend-sdk-stream-requirements.md | 181 ++++++ .../version-0.95.1/06_extend-setup.md | 50 ++ .../06_extend-tutorial-adapters.md | 612 ++++++++++++++++++ .../06_extend-tutorial-data-processors.md | 454 +++++++++++++ .../06_extend-tutorial-data-sinks.md | 272 ++++++++ .../07_technicals-architecture.md | 110 ++++ .../version-0.95.1/07_technicals-messaging.md | 65 ++ .../07_technicals-runtime-wrappers.md | 37 ++ .../07_technicals-user-guidance.md | 7 + .../version-0.95.1/08_debugging.md | 7 + .../version-0.95.1/08_monitoring.md | 7 + .../version-0.95.1/09_contribute.md | 17 + .../version-0.95.1/09_get-help.md | 25 + .../version-0.95.1/faq-common-problems.md | 73 +++ ...reampipes.connect.adapters.image.stream.md | 38 ++ ...apache.streampipes.connect.adapters.iss.md | 39 ++ ...s.connect.iiot.adapters.influxdb.stream.md | 41 ++ ...treampipes.connect.iiot.adapters.iolink.md | 90 +++ ...mpipes.connect.iiot.adapters.netio.mqtt.md | 64 ++ ...mpipes.connect.iiot.adapters.netio.rest.md | 64 ++ ...e.streampipes.connect.iiot.adapters.oi4.md | 88 +++ ...streampipes.connect.iiot.adapters.opcua.md | 76 +++ ...ipes.connect.iiot.adapters.plc4x.modbus.md | 75 +++ ...eampipes.connect.iiot.adapters.plc4x.s7.md | 96 +++ ...e.streampipes.connect.iiot.adapters.ros.md | 64 ++ ...connect.iiot.adapters.simulator.machine.md | 92 +++ ...pipes.connect.iiot.protocol.stream.file.md | 90 +++ ...pipes.connect.iiot.protocol.stream.http.md | 38 ++ ...connect.iiot.protocol.stream.httpserver.md | 51 ++ ...ipes.connect.iiot.protocol.stream.kafka.md | 38 ++ ...pipes.connect.iiot.protocol.stream.mqtt.md | 53 ++ ...pipes.connect.iiot.protocol.stream.nats.md | 69 ++ 
...pes.connect.iiot.protocol.stream.pulsar.md | 38 ++ ...s.connect.iiot.protocol.stream.rocketmq.md | 38 ++ ...pes.connect.iiot.protocol.stream.tubemq.md | 54 ++ ....apache.streampipes.connectors.ros.sink.md | 60 ++ ...cation.jvm.generic-image-classification.md | 52 ++ ...r.imageclassification.jvm.image-cropper.md | 43 ++ ....imageclassification.jvm.image-enricher.md | 43 ++ ...es.processor.imageclassification.qrcode.md | 68 ++ ....processors.changedetection.jvm.welford.md | 72 +++ ...eampipes.processors.enricher.jvm.jseval.md | 55 ++ ...sors.enricher.jvm.processor.math.mathop.md | 56 ++ ...nricher.jvm.processor.math.staticmathop.md | 56 ++ ...ors.enricher.jvm.processor.trigonometry.md | 56 ++ ...pes.processors.enricher.jvm.valuechange.md | 52 ++ ...eampipes.processors.filters.jvm.compose.md | 50 ++ ...reampipes.processors.filters.jvm.enrich.md | 47 ++ ...treampipes.processors.filters.jvm.limit.md | 70 ++ ...treampipes.processors.filters.jvm.merge.md | 57 ++ ...es.processors.filters.jvm.movingaverage.md | 46 ++ ....processors.filters.jvm.numericalfilter.md | 56 ++ ...cessors.filters.jvm.numericaltextfilter.md | 67 ++ ...ors.filters.jvm.processor.booleanfilter.md | 52 ++ ...eampipes.processors.filters.jvm.project.md | 48 ++ ...reampipes.processors.filters.jvm.schema.md | 46 ++ ....streampipes.processors.filters.jvm.sdt.md | 85 +++ ...pipes.processors.filters.jvm.textfilter.md | 53 ++ ...mpipes.processors.filters.jvm.threshold.md | 56 ++ ...es.processors.filters.jvm.throughputmon.md | 56 ++ ...rs.geo.jvm.jts.processor.buffergeometry.md | 95 +++ ...ssors.geo.jvm.jts.processor.bufferpoint.md | 82 +++ ...s.processors.geo.jvm.jts.processor.epsg.md | 64 ++ ....geo.jvm.jts.processor.latlngtojtspoint.md | 73 +++ ...sors.geo.jvm.jts.processor.reprojection.md | 68 ++ ...essors.geo.jvm.jts.processor.trajectory.md | 83 +++ ...eo.jvm.jts.processor.validation.complex.md | 86 +++ ...geo.jvm.jts.processor.validation.simple.md | 80 +++ ....processor.distancecalculator.haversine.md | 61 ++ ...ssor.distancecalculator.haversinestatic.md | 74 +++ ...m.latlong.processor.geocoder.googlemaps.md | 61 ++ ...ong.processor.geocoder.googlemapsstatic.md | 62 ++ ...tlong.processor.revgeocoder.geocityname.md | 67 ++ ...o.jvm.latlong.processor.speedcalculator.md | 59 ++ ...che.streampipes.processors.siddhi.count.md | 66 ++ ....streampipes.processors.siddhi.increase.md | 65 ++ ...ampipes.processors.siddhi.listcollector.md | 51 ++ ...treampipes.processors.siddhi.listfilter.md | 53 ++ ...pipes.processors.siddhi.numericalfilter.md | 63 ++ ...ache.streampipes.processors.siddhi.topk.md | 53 ++ ...pipes.processors.textmining.jvm.chunker.md | 69 ++ ...es.processors.textmining.jvm.namefinder.md | 65 ++ ....processors.textmining.jvm.partofspeech.md | 62 ++ ...essors.textmining.jvm.sentencedetection.md | 59 ++ ...pes.processors.textmining.jvm.tokenizer.md | 59 ++ ...transformation.jvm.booloperator.counter.md | 66 ++ ...ransformation.jvm.booloperator.inverter.md | 51 ++ ...transformation.jvm.booloperator.logical.md | 42 ++ ...sformation.jvm.booloperator.timekeeping.md | 70 ++ ...s.transformation.jvm.booloperator.timer.md | 58 ++ ...essors.transformation.jvm.changed-value.md | 46 ++ ...ocessors.transformation.jvm.count-array.md | 55 ++ ...ocessors.transformation.jvm.csvmetadata.md | 77 +++ ....processors.transformation.jvm.datetime.md | 78 +++ ...ssors.transformation.jvm.duration-value.md | 51 ++ ...cessors.transformation.jvm.field-mapper.md | 74 +++ ...ocessors.transformation.jvm.fieldhasher.md | 55 ++ 
...ocessors.transformation.jvm.fieldrename.md | 59 ++ ...sformation.jvm.measurementunitconverter.md | 53 ++ ...rmation.jvm.processor.booloperator.edge.md | 58 ++ ...mation.jvm.processor.booloperator.state.md | 63 ++ ...tion.jvm.processor.state.labeler.number.md | 58 ++ ...sformation.jvm.processor.staticmetadata.md | 74 +++ ...tion.jvm.processor.stringoperator.state.md | 51 ++ ...mation.jvm.processor.timestampextractor.md | 58 ++ ...pes.processors.transformation.jvm.round.md | 72 +++ ...ocessors.transformation.jvm.split-array.md | 60 ++ ...ansformation.jvm.stringoperator.counter.md | 65 ++ ...transformation.jvm.stringoperator.timer.md | 66 ++ ...cessors.transformation.jvm.taskduration.md | 50 ++ ...transformation.jvm.transform-to-boolean.md | 53 ++ ...treampipes.sinks.brokers.jvm.bufferrest.md | 58 ++ ...pache.streampipes.sinks.brokers.jvm.jms.md | 60 ++ ...che.streampipes.sinks.brokers.jvm.kafka.md | 61 ++ ...ache.streampipes.sinks.brokers.jvm.mqtt.md | 61 ++ ...ache.streampipes.sinks.brokers.jvm.nats.md | 78 +++ ...he.streampipes.sinks.brokers.jvm.pulsar.md | 63 ++ ....streampipes.sinks.brokers.jvm.rabbitmq.md | 73 +++ ...ache.streampipes.sinks.brokers.jvm.rest.md | 52 ++ ....streampipes.sinks.brokers.jvm.rocketmq.md | 59 ++ ...he.streampipes.sinks.brokers.jvm.tubemq.md | 61 ++ ...streampipes.sinks.brokers.jvm.websocket.md | 52 ++ ...pache.streampipes.sinks.databases.ditto.md | 73 +++ ...streampipes.sinks.databases.jvm.couchdb.md | 63 ++ ...treampipes.sinks.databases.jvm.influxdb.md | 85 +++ ...e.streampipes.sinks.databases.jvm.iotdb.md | 91 +++ ...e.streampipes.sinks.databases.jvm.opcua.md | 65 ++ ...eampipes.sinks.databases.jvm.postgresql.md | 73 +++ ...e.streampipes.sinks.databases.jvm.redis.md | 87 +++ ...streampipes.sinks.internal.jvm.datalake.md | 68 ++ ...ampipes.sinks.internal.jvm.notification.md | 63 ++ ...reampipes.sinks.notifications.jvm.email.md | 73 +++ ...ampipes.sinks.notifications.jvm.msteams.md | 86 +++ ...pipes.sinks.notifications.jvm.onesignal.md | 63 ++ ...reampipes.sinks.notifications.jvm.slack.md | 66 ++ ...mpipes.sinks.notifications.jvm.telegram.md | 70 ++ .../version-0.95.1/user-guide-first-steps.md | 209 ++++++ .../user-guide-for-quickstart.md | 134 ++++ .../version-0.95.1/user-guide-tour.md | 305 +++++++++ .../version-0.95.1-sidebars.json | 204 ++++++ website-v2/versions.json | 1 + 175 files changed, 13743 insertions(+) create mode 100644 website-v2/versioned_docs/version-0.95.1/01_try-installation.md create mode 100644 website-v2/versioned_docs/version-0.95.1/01_try-overview.md create mode 100644 website-v2/versioned_docs/version-0.95.1/01_try-tutorial.md create mode 100644 website-v2/versioned_docs/version-0.95.1/02_concepts-adapter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/02_concepts-data-streams.md create mode 100644 website-v2/versioned_docs/version-0.95.1/02_concepts-glossary.md create mode 100644 website-v2/versioned_docs/version-0.95.1/02_concepts-overview.md create mode 100644 website-v2/versioned_docs/version-0.95.1/02_concepts-pipeline.md create mode 100644 website-v2/versioned_docs/version-0.95.1/02_introduction.md create mode 100644 website-v2/versioned_docs/version-0.95.1/03_use-configurations.md create mode 100644 website-v2/versioned_docs/version-0.95.1/03_use-connect.md create mode 100644 website-v2/versioned_docs/version-0.95.1/03_use-dashboard.md create mode 100644 website-v2/versioned_docs/version-0.95.1/03_use-data-explorer.md create mode 100644 website-v2/versioned_docs/version-0.95.1/03_use-install-pipeline-elements.md 
create mode 100644 website-v2/versioned_docs/version-0.95.1/03_use-managing-pipelines.md create mode 100644 website-v2/versioned_docs/version-0.95.1/03_use-notifications.md create mode 100644 website-v2/versioned_docs/version-0.95.1/03_use-pipeline-editor.md create mode 100644 website-v2/versioned_docs/version-0.95.1/05_deploy-choosing-the-right-flavor.md create mode 100644 website-v2/versioned_docs/version-0.95.1/05_deploy-docker.md create mode 100644 website-v2/versioned_docs/version-0.95.1/05_deploy-environment-variables.md create mode 100644 website-v2/versioned_docs/version-0.95.1/05_deploy-kubernetes.md create mode 100644 website-v2/versioned_docs/version-0.95.1/05_deploy-security.md create mode 100644 website-v2/versioned_docs/version-0.95.1/05_deploy-use-ssl.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-archetypes.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-cli.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-client.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-customize-ui.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-first-processor.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-sdk-event-model.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-sdk-functions.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-sdk-migration-sd.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-sdk-migrations.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-sdk-output-strategies.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-sdk-static-properties.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-sdk-stream-requirements.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-setup.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-adapters.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-data-processors.md create mode 100644 website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-data-sinks.md create mode 100644 website-v2/versioned_docs/version-0.95.1/07_technicals-architecture.md create mode 100644 website-v2/versioned_docs/version-0.95.1/07_technicals-messaging.md create mode 100644 website-v2/versioned_docs/version-0.95.1/07_technicals-runtime-wrappers.md create mode 100644 website-v2/versioned_docs/version-0.95.1/07_technicals-user-guidance.md create mode 100644 website-v2/versioned_docs/version-0.95.1/08_debugging.md create mode 100644 website-v2/versioned_docs/version-0.95.1/08_monitoring.md create mode 100644 website-v2/versioned_docs/version-0.95.1/09_contribute.md create mode 100644 website-v2/versioned_docs/version-0.95.1/09_get-help.md create mode 100644 website-v2/versioned_docs/version-0.95.1/faq-common-problems.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.adapters.image.stream.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.adapters.iss.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.influxdb.stream.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.iolink.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.netio.mqtt.md create 
mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.netio.rest.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.oi4.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.opcua.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.plc4x.modbus.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.plc4x.s7.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.ros.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.simulator.machine.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.file.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.http.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.httpserver.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.kafka.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.mqtt.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.nats.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.pulsar.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.rocketmq.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.tubemq.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connectors.ros.sink.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.qrcode.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.changedetection.jvm.welford.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.jseval.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.math.mathop.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.math.staticmathop.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.trigonometry.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.valuechange.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.compose.md create mode 100644 
website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.enrich.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.limit.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.merge.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.movingaverage.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.numericalfilter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.processor.booleanfilter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.project.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.schema.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.sdt.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.textfilter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.threshold.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.throughputmon.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.buffergeometry.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.bufferpoint.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.epsg.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latlngtojtspoint.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.reprojection.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.complex.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.simple.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversine.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversinestatic.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemaps.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemapsstatic.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.revgeocoder.geocityname.md create mode 100644 
website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.speedcalculator.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.count.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.increase.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.listcollector.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.listfilter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.numericalfilter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.topk.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.chunker.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.namefinder.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.partofspeech.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.tokenizer.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.logical.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.changed-value.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.count-array.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.datetime.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.duration-value.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.field-mapper.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.fieldhasher.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.fieldrename.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.measurementunitconverter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge.md create mode 100644 
website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.staticmetadata.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.round.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.split-array.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.taskduration.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.jms.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.kafka.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.mqtt.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.nats.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.pulsar.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rest.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rocketmq.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.tubemq.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.websocket.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.ditto.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.couchdb.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.influxdb.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.iotdb.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.opcua.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.postgresql.md create mode 100644 
website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.redis.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.internal.jvm.datalake.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.internal.jvm.notification.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.email.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.msteams.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.onesignal.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.slack.md create mode 100644 website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.telegram.md create mode 100644 website-v2/versioned_docs/version-0.95.1/user-guide-first-steps.md create mode 100644 website-v2/versioned_docs/version-0.95.1/user-guide-for-quickstart.md create mode 100644 website-v2/versioned_docs/version-0.95.1/user-guide-tour.md create mode 100644 website-v2/versioned_sidebars/version-0.95.1-sidebars.json

diff --git a/website-v2/versioned_docs/version-0.95.1/01_try-installation.md b/website-v2/versioned_docs/version-0.95.1/01_try-installation.md
new file mode 100644
index 000000000..ea6b45d79
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/01_try-installation.md
@@ -0,0 +1,62 @@
---
id: try-installation
title: Installation
sidebar_label: Installation
---

import DownloadSection from '@site/src/components/download/DownloadSection.tsx';

The easiest way to install StreamPipes is our Docker-based installation. For production-grade deployments, we also recommend looking at our Kubernetes support, which is part of the installation kit.

## Prerequisites

The Docker-based installation requires **Docker** and **Docker Compose** to be installed on the target machine. Installation instructions can be found below.

:::info Install Docker
Go to https://docs.docker.com/installation/ and follow the instructions to install Docker for your OS. Make sure Docker can be started as a non-root user (described in the installation manual; don't forget to log out and in again) and check that Docker is installed correctly by executing `docker run hello-world`.
:::

### Supported operating systems

The Docker-based installation supports the operating systems **Linux**, **Mac OS X** and **Windows 10 upwards**. Older Windows versions are not fully compatible with Docker. Linux VMs running under Windows might cause network problems with Docker; therefore, some manual work might be needed to make StreamPipes run properly.

### Web Browser

The StreamPipes application itself will be accessible through a web browser. We recommend a recent version of Chrome (best experience), Firefox or Edge.

## Install StreamPipes

## Setup StreamPipes

Once you've opened the browser at the URL given above, you should see the StreamPipes application as shown below. On initial startup, StreamPipes automatically performs an installation process. Once all components are successfully configured, continue by clicking "Go to login page".

On the login page, enter your credentials; you should then be forwarded to the home page.

Congratulations! You've successfully managed to install StreamPipes.
Now we're ready to build our first pipeline!

:::danger Errors during the installation process
In most cases, errors during the installation are due to an under-powered system. If there is a problem with any of the components, please restart the whole system (`docker-compose down`; if needed, also delete the volumes). Please also make sure that you've assigned enough memory to Docker.
:::

## Next Steps

That's it! Have a look at the usage guide to learn how to use Apache StreamPipes.
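If you prefer to verify the setup programmatically, a quick way is the official StreamPipes Python client. The following is a minimal sketch, assuming a local installation reachable on port 80 and an API key generated for your user in the UI profile settings; adjust these values to your deployment.

```python
# Minimal connectivity check with the official StreamPipes Python client
# (pip install streampipes). Host, port, user and API key below are
# assumptions that depend on your deployment.
from streampipes.client import StreamPipesClient
from streampipes.client.config import StreamPipesClientConfig
from streampipes.client.credential_provider import StreamPipesApiKeyCredentials

config = StreamPipesClientConfig(
    credential_provider=StreamPipesApiKeyCredentials(
        username="admin@streampipes.apache.org",  # assumed default user
        api_key="<your-api-key>",                 # created in the UI profile settings
    ),
    host_address="localhost",
    https_disabled=True,
    port=80,
)

client = StreamPipesClient(client_config=config)
client.describe()  # prints an overview of the resources available in your instance
```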
diff --git a/website-v2/versioned_docs/version-0.95.1/01_try-overview.md b/website-v2/versioned_docs/version-0.95.1/01_try-overview.md
new file mode 100644
index 000000000..29a059d4a
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/01_try-overview.md
@@ -0,0 +1,134 @@
---
id: user-guide-introduction
title: Apache StreamPipes Documentation
sidebar_label: Overview
---

This is the documentation of Apache StreamPipes.

StreamPipes Overview

## 🚀 Try

Your first steps with Apache StreamPipes:
Install StreamPipes 🔗

## 💡 Concepts

Learn about some general concepts of StreamPipes:
Overview 🔗

## 🎓 Use

## 📚 Pipeline Elements

Available pipeline elements in StreamPipes:
Adapters 🔗, Data Processors 🔗, Data Sinks 🔗

## ⚡ Deploy

How to set up StreamPipes in test and production environments:
Docker 🔗, Kubernetes 🔗, Use SSL 🔗

## 🔧 Technicals

Learn about technical concepts behind the curtain:
Architecture 🔗, User Guidance 🔗, Runtime Wrappers 🔗, Messaging 🔗, Configuration 🔗

## 👪 Community

Get support and learn how to contribute to StreamPipes:
Get Help 🔗, Contribute 🔗

## 🐍 StreamPipes Python

Discover what we offer for the Python world:
Python Documentation 🔗
diff --git a/website-v2/versioned_docs/version-0.95.1/01_try-tutorial.md b/website-v2/versioned_docs/version-0.95.1/01_try-tutorial.md
new file mode 100644
index 000000000..c13d6f1dc
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/01_try-tutorial.md
@@ -0,0 +1,20 @@
---
id: try-tutorial
title: Interactive Tutorial
sidebar_label: Interactive Tutorial
---

Once you've installed StreamPipes and opened the home screen, you'll see a number of modules that are part of the StreamPipes toolbox. As a first step, you might be interested in taking the interactive tutorial that helps you create your first pipeline. Switch to the **Pipeline Editor** and you will see a dialog that asks you whether you want to start the interactive tutorial:

Tutorial Welcome Page

Click **Start Tour** to start the tour. In this tour, you'll build a simple pipeline that monitors (simulated) live data from a water tank system. Within the tour, perform the actions as recommended and click **Next** to trigger the next steps. Some tour steps won't require you to select **Next** but will wait for you to take the recommended action. You can cancel the tour anytime by clicking the **Exit Tour** button.

Tutorial Welcome Page

Now that you've built your first pipeline, you might be interested in reading about some of our core [concepts](concepts-overview).

diff --git a/website-v2/versioned_docs/version-0.95.1/02_concepts-adapter.md b/website-v2/versioned_docs/version-0.95.1/02_concepts-adapter.md
new file mode 100644
index 000000000..a94c38268
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/02_concepts-adapter.md
@@ -0,0 +1,7 @@
---
id: concepts-adapter
title: Data Adapters
sidebar_label: Data Adapters
---

tbd

diff --git a/website-v2/versioned_docs/version-0.95.1/02_concepts-data-streams.md b/website-v2/versioned_docs/version-0.95.1/02_concepts-data-streams.md
new file mode 100644
index 000000000..329f9b908
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/02_concepts-data-streams.md
@@ -0,0 +1,7 @@
---
id: concepts-data-streams
title: Data Streams
sidebar_label: Data Streams
---

tbd
\ No newline at end of file

diff --git a/website-v2/versioned_docs/version-0.95.1/02_concepts-glossary.md b/website-v2/versioned_docs/version-0.95.1/02_concepts-glossary.md
new file mode 100644
index 000000000..b401d1829
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/02_concepts-glossary.md
@@ -0,0 +1,7 @@
---
id: concepts-glossary
title: Glossary
sidebar_label: Glossary
---

tbd
\ No newline at end of file

diff --git a/website-v2/versioned_docs/version-0.95.1/02_concepts-overview.md b/website-v2/versioned_docs/version-0.95.1/02_concepts-overview.md
new file mode 100644
index 000000000..f26f3cc07
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/02_concepts-overview.md
@@ -0,0 +1,46 @@
---
id: concepts-overview
title: StreamPipes Concepts
sidebar_label: Overview
---

To understand how StreamPipes works, it is helpful to understand a few core concepts, which are illustrated below. These encompass the entire data journey within StreamPipes: starting with data collection ([adapters](#adapter)), through data exchange ([data streams](#data-stream)) and data processing ([data processors](#data-processor) and [pipelines](#pipeline)), to data persistence and distribution ([data sinks](#data-sink)).
Overview of concepts

## Adapter
An adapter connects to any external data source (e.g., OPC-UA, MQTT, S7 PLC, Modbus) and forwards the events it receives to the internal StreamPipes system. Adapters can be created by using a predefined adapter for a data source available in our marketplace [StreamPipes Connect](./03_use-connect.md). An overview of all available adapters can be found under the menu bar **📚 Pipeline Elements**. When you select one of these adapters, you can easily connect to the data source using an intuitive and convenient UI dialog (see the Connect section for more details). Alternatively, you can define your own adapter by [using the provided Software Development Kit (SDK)](./06_extend-tutorial-adapters.md). Creating an adapter is always the first step when you want to get data into StreamPipes and process it further.

## Data Stream
**Data streams** are the primary source for working with events in StreamPipes. A stream is an ordered sequence of events, where an event typically consists of one or more observation values and additional metadata. The `structure` (or `schema`, as we call it) of an event provided by a data stream is stored in StreamPipes' internal semantic schema registry. Data streams are primarily created by adapters, but can also be created by a [StreamPipes Function](./06_extend-sdk-functions.md).

## Data Processor
**Data processors** in StreamPipes transform one or more input streams into an output stream. Such transformations can be simple, such as filtering based on a predefined rule, or more complex, such as applying rule-based or learning-based algorithms to the data. Data processors can be applied to any data stream that meets the input requirements of a processor. In addition, most processors can be configured by providing custom parameters directly in the user interface. Processing elements define stream requirements, which are a set of minimum characteristics that an incoming event stream must provide. Data processors can maintain state or perform stateless operations.

## Data Sink
**Data sinks** consume event streams similar to data processors, but do not provide an output data stream. As such, data sinks typically perform some action or trigger a visualization as a result of a stream transformation. Similar to data processors, sinks also require specific input characteristics from each bound data stream and can be customized. StreamPipes provides several internal data sinks, for example, to generate notifications, visualize live data, or persist historical data from incoming streams. In addition, StreamPipes provides several data sinks to forward data streams to external systems such as databases.

## Pipeline
A pipeline in Apache StreamPipes describes the transformation process from a data stream to a data sink. Typically, a pipeline consists of at least one data stream, zero or more data processors, and at least one data sink. Pipelines are created graphically by users using the [Pipeline Editor](./03_use-pipeline-editor.md) and can be started and stopped at any time.
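To make these concepts more tangible, here is a small conceptual sketch. It is plain illustrative Python, not StreamPipes API code; all field names and the threshold are assumptions chosen for the example.

```python
# A hypothetical event as it might flow through a StreamPipes data stream:
# a flat key/value structure combining observation values and metadata.
event = {
    "timestamp": 1720458080000,  # metadata: UNIX timestamp in milliseconds
    "sensorId": "pressure-01",   # metadata: identifier of the source
    "pressure": 84.2,            # observation value
    "temperature": 44.7,         # observation value
}

# Conceptually, a data processor maps input events to output events. A simple
# stateless example is a numerical filter that only forwards matching events:
def numerical_filter(e: dict, threshold: float = 80.0):
    return e if e["pressure"] > threshold else None

# A data sink consumes events without producing an output stream, e.g. by
# persisting them or triggering a notification.
if (out := numerical_filter(event)) is not None:
    print("forward to sink:", out)
```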
diff --git a/website-v2/versioned_docs/version-0.95.1/02_concepts-pipeline.md b/website-v2/versioned_docs/version-0.95.1/02_concepts-pipeline.md
new file mode 100644
index 000000000..3d2c5369b
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/02_concepts-pipeline.md
@@ -0,0 +1,7 @@
---
id: concepts-pipelines
title: Pipelines
sidebar_label: Pipelines
---

tbd
\ No newline at end of file

diff --git a/website-v2/versioned_docs/version-0.95.1/02_introduction.md b/website-v2/versioned_docs/version-0.95.1/02_introduction.md
new file mode 100644
index 000000000..6dc71b384
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/02_introduction.md
@@ -0,0 +1,85 @@
---
id: introduction
title: Introduction
sidebar_label: Introduction
---

## What is StreamPipes?

Apache StreamPipes is a self-service Industrial IoT toolbox that enables non-technical users to connect, analyze and explore IoT data streams. The main goal of StreamPipes is to help users bridge the gap between operational technology (OT) and information technology (IT). This is achieved by providing a set of tools which help make industrial data accessible for downstream tasks such as data analytics and condition monitoring. When working with industrial data, and especially when building upon an open source stack for such tasks, users are often faced with the management and integration of a variety of different tools for data connectivity, messaging & integration, data enrichment, data storage, visualization and analytics. This results in increasing operational complexity and software stacks that are hard to manage.

Apache StreamPipes addresses this problem: It provides a complete toolbox with a variety of different tools to easily gather data from OT systems such as Programmable Logic Controllers (PLCs), industrial protocols (e.g., OPC-UA or Modbus), IT protocols (e.g., MQTT) and others. Data is integrated in the form of live data streams. Based on connected data, StreamPipes provides another module called the pipeline editor, which can be used to apply real-time analytics algorithms on connected data streams. To this end, a library of pre-defined algorithms can be used. Out of the box, StreamPipes provides more than 100 pipeline elements tailored to manufacturing data analytics. This includes simple rule-based algorithms (e.g., flank detection, peak detection, boolean timers), as well as the possibility to integrate more sophisticated ML-based algorithms. Finally, the pipeline editor allows integration with third-party systems by using a variety of data sinks (e.g., to forward data to messaging brokers such as Apache Kafka, MQTT or RocketMQ, to store data in databases such as PostgreSQL or Redis, or to trigger notifications). Besides pipelines, an included data explorer allows users to visually analyze industrial IoT data. For this purpose, a number of visualizations are integrated that allow non-technical users to quickly get first insights. Examples are correlations between several sensor values, value heatmaps, distributions or time-series visualizations. Further tools include a dashboard used for real-time monitoring, e.g., for visualizing live KPIs at shop floor level.

But StreamPipes is much more than just the user interface and an orchestration system for pipelines: It can be used as a whole developer platform for Industrial IoT applications.
Apache StreamPipes is made for extensibility - it provides several extension points, which allow the definition of custom algorithms, additional interfaces to third-party tools and proprietary data sources.

StreamPipes includes developer support for Java and Python, making it easy to integrate custom-trained machine learning models into the data processing environment. With the built-in Python support, it is also possible to run online machine learning methods directly on data streams gathered by StreamPipes.

## Where does StreamPipes help?

Being positioned in the industrial IoT domain, the overall goal of StreamPipes is to help manufacturing companies quickly build up an industrial IoT infrastructure and analyze IIoT data without the need for manual programming. Oftentimes, StreamPipes is compared to other tools in this area such as Node-RED for visual wiring of pipelines, which is often used together with Grafana for data visualization and InfluxDB for time-series storage. The disadvantage of such architectures is the system complexity beyond the first prototype, especially when it comes to production deployments. Maintaining and securing multiple software instances is often a hard task requiring substantial development effort. In addition, implementing single sign-on and providing a unified user experience is another hurdle. This is where StreamPipes, as a single integrated tool with production-critical features such as access and role management, provides many advantages.

StreamPipes already has a wide user base in the manufacturing domain. It helps users to quickly take the first steps related to industrial analytics, but can also be used for monitoring whole production facilities, analyzing data streams from multiple plants and sensors in real time using the integrated algorithm toolbox. Customization to individual use cases is easy due to several extension points:

* Software development kit for adapters, data processors and sinks: The functionality of StreamPipes can be extended by using the integrated SDK. For instance, it is possible to integrate custom-tailored algorithms for proprietary sensors or models into the toolbox. Additional algorithms and data sinks can be installed at runtime.
* Additional user interface plugins: StreamPipes allows extending the default installation with additional UI views, making use of a micro frontend approach. For instance, users can extend the system with custom-tailored views for a specific machine or plant. Developers can use a platform API to communicate with the core StreamPipes instance.
* UI customization: To ensure a consistent look and feel, StreamPipes can be customized to the company’s corporate identity.

## How does StreamPipes technically work in a nutshell?

Overview StreamPipes Architecture

To foster extensibility, Apache StreamPipes is based on a microservice architecture as illustrated above. The main services provided or used by StreamPipes are a) the user interface, b) the core, c) a time-series storage, d) a publish/subscribe messaging layer and e) extensions services. Adapters are created over the user interface using an intuitive configuration wizard and connect to the underlying source systems. Raw events coming from adapters can be pre-processed (e.g., measurement unit conversions or datatype conversions). Afterwards, events are sent to the message broker, which is the central backbone to provide IIoT data to internal and external applications.
Besides adapters, extensions microservices can also integrate additional business logic in the form of data processors and data sinks. StreamPipes comes with over 100 built-in processors and sinks, covering basic use cases out of the box. The StreamPipes core takes care of orchestrating these pipeline elements and communicates with the user interface. In addition, a time-series storage ensures persistence and can be used by any extensions service to write data into the internal storage. The StreamPipes core provides a query interface to access historical data, which is, for instance, used by the data explorer UI component. The user interface itself provides several built-in modules but can also be extended with additional micro frontends.

diff --git a/website-v2/versioned_docs/version-0.95.1/03_use-configurations.md b/website-v2/versioned_docs/version-0.95.1/03_use-configurations.md
new file mode 100644
index 000000000..4aa953fbb
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/03_use-configurations.md
@@ -0,0 +1,48 @@
---
id: use-configurations
title: Configurations
sidebar_label: Configurations
---

The configuration section is an admin-only interface for system-wide settings.

## General configuration

General configuration

The general configuration provides basic system settings. The basic settings allow configuring the app name (which is used, e.g., for mails sent by StreamPipes). Additionally, the externally available host and port can be set, which are used by the mail system to add links to emails.

Furthermore, self-registration and password recovery features can be activated in this view. Note that both features require a working email configuration.

## Datalake

Datalake configuration

Here, stored data lake measurement series can be truncated or deleted. The view also gives information on the number of data points currently stored in a measurement series.

## Email configuration

Email configuration

In this section, the email configuration is set. The email configuration is used to send mails to users. Most standard mail server settings are supported. The configuration can be validated by triggering a test mail that is sent to a given recipient.

## Messaging

Messaging configuration

The messaging configuration is used to control parameters for communication between pipeline elements. Individual Kafka settings can be configured, as well as the priority of selected message formats and protocols during pipeline creation (a sketch at the end of this page shows one way to inspect this traffic).

## Pipeline Element Configuration

Pipeline element configuration

Individual configurations of extensions services are available in this view. The available configurations depend on the provided configuration variables in the service definition of each extensions service.

## Security

Security configuration

The security configuration allows managing existing user accounts, service accounts and groups. New users can be added and roles can be assigned.

Please also read more about security [here](05_deploy-security.md).
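When debugging the messaging layer, it can be handy to peek at the raw events exchanged between pipeline elements. The sketch below uses the third-party `kafka-python` package; the broker address, port and topic name are assumptions that depend entirely on your deployment and the pipeline element whose output you want to inspect.

```python
# Hypothetical sketch: inspecting events between pipeline elements when Kafka
# is used as the messaging layer (pip install kafka-python). Broker address
# and topic name are assumptions -- look them up in your deployment first.
import json
from kafka import KafkaConsumer

consumer = KafkaConsumer(
    "org.apache.streampipes.example.topic",  # assumed topic name
    bootstrap_servers="localhost:9094",      # assumed externally mapped broker port
    value_deserializer=lambda v: json.loads(v.decode("utf-8")),
)

for message in consumer:
    print(message.value)  # one JSON event per message
```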
diff --git a/website-v2/versioned_docs/version-0.95.1/03_use-connect.md b/website-v2/versioned_docs/version-0.95.1/03_use-connect.md
new file mode 100644
index 000000000..ba8146481
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/03_use-connect.md
@@ -0,0 +1,72 @@
---
id: use-connect
title: StreamPipes Connect
sidebar_label: StreamPipes Connect
---

StreamPipes Connect is the module to connect external data sources with Apache StreamPipes directly from the user interface. StreamPipes Connect offers various adapters for common communication protocols and some specific sensors. Besides connecting data, StreamPipes Connect offers ways to pre-process data without the need to build pipelines and integrates a schema guesser that listens for incoming data and recommends the recognized event schema.

The screenshot below illustrates the data marketplace, which is shown after navigating to "StreamPipes Connect" and then clicking the "New adapter" button at the top.

StreamPipes Connect Overview

## Connecting new data sources

### Data Marketplace
The data marketplace shows a list of all adapters that are currently installed in Apache StreamPipes. Each adapter offers various configuration options which depend on the specifics of the adapter. Adapters are distinguished a) by the data source concept they provide (data set or data stream) and b) by the adapter type, where we distinguish between _generic adapters_, which implement a generic communication protocol such as MQTT or Apache Kafka, and _specific adapters_, which implement the interface of a specific sensor (e.g., for Netio power sockets). Several filter options are available to find a suitable adapter. The configuration of a new adapter starts with selecting one of the available adapters, which starts an assistant that supports the adapter generation.

### Protocol/Basic Settings
In the first step, basic configurations need to be provided. For instance, for an Apache PLC4X adapter, the IP address of the PLC needs to be provided. In this example, we provide basic settings for connecting to an Apache Kafka broker. After all values are provided, the "Next" button opens the next step.

StreamPipes Connect Basic Settings

### Format Specification
The next step, format generation, is only available for generic adapters which support different message formats to be sent over the corresponding protocol. Think of a message broker that is able to consume messages in either JSON or binary format. Currently supported formats include XML, various JSON representations, images and CSV. After a format has been selected, further format configurations can be provided (depending on the selected format) to further customize the incoming message format.

StreamPipes Connect Format Selection

### Schema Editor
In the next step, based on the previously provided protocol and format settings, the system will either provide the fixed/pre-defined schema of the adapter or, in case of specific adapters, will connect to the underlying system and try to listen for incoming data. After a few seconds, the schema editor will appear, providing a list of detected fields from the incoming events (the schema).

StreamPipes Connect Schema Editor

In the toolbar, several configuration options are available which transform the original schema:

* **Add Nested Property**. This option allows modifying the structure of the event by creating a nested structure. The schema can be changed simply by dragging and dropping fields into the nested structure.
* **Add Static Value**. This option allows adding a field containing a static value (e.g., an identifier) to the event.
* **Add Timestamp**. This option appends the current timestamp to each incoming event, useful in case the timestamp is not provided by the origin.
* **Refresh**. Re-triggers the schema guessing.
* **Delete field**. Select one or more fields by clicking the checkbox on the right and trigger the delete button.
* **Property scope**. For each field, a property scope can be defined, which is either _Measurement_, _Dimension_ or _Header_. These values are later used in the pipeline editor to assist in configuring pipeline elements and do not have any functional consequence. Use _Measurement_ to indicate that the field measures a value (e.g., a temperature value from a sensor), use _Dimension_ for any identifier (e.g., the sensor ID) and use _Header_ for any other metadata such as timestamps.

For each field (also called event property) of the schema, additional configuration options are available by clicking the _Edit_ button (a sketch at the end of this page illustrates the effect of the resulting transformation rules):

* **Label**. Used to provide a human-readable label for the field, which will ease the identification of fields when building pipelines.
* **Runtime Name.** This is the identifier of the field in the underlying message representation format (e.g., the JSON key). Renaming the runtime name will trigger a so-called _transformation rule_ which renames the incoming field name to the new field name before forwarding it to StreamPipes.
* **Domain Property/Semantic Type**. To help StreamPipes better understand the value which is represented by the field, semantic type information can be given. As of StreamPipes 0.68.0, the semantic type can be selected from a wide range of available options. Additionally, a URL can be manually provided that indicates the meaning of the value (e.g., http://schema.org/Temperature).
* **Mark as Timestamp**. Indicates that the selected value represents a timestamp. When selected, a _timestamp converter_ can be configured which will convert incoming timestamps to the UNIX timestamp.
* **Runtime Type**. Here, the data type can be changed.
* **Unit**. Allows specifying the unit in which the value is measured. Once selected, you can also automatically convert the unit to a target unit, which will then be inserted into the data stream produced by the adapter (see screenshot below).

StreamPipes Connect Unit Conversion

Assigning a timestamp is mandatory and can be done either by adding a timestamp from the menu or by choosing an existing field and marking it as timestamp.

### Adapter Generation
Finally, the adapter is ready to be started. In the _Adapter Generation_ page, a name and description for the resulting data stream must be provided. Once started, StreamPipes creates your new adapter and displays a preview of the connected data, which refreshes about once per second. Afterwards, the newly created data stream is available in the pipeline editor for further usage.

StreamPipes Connect Adapter Generation

## Managing adapters

Currently running adapters are available in the "Running adapters" section of StreamPipes Connect. Existing adapters can be stopped and deleted. Currently, there is no mechanism to edit an existing adapter or to stop the adapter without deleting it.

### Adapter Templates
For frequently used configurations, adapter templates can be created. An adapter template is a pre-configured adapter which can be further customized by users. Created adapter templates are available in the marketplace similar to standard adapters.
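To illustrate what the transformation rules from the schema editor do conceptually, here is a hypothetical Python sketch. This is not StreamPipes code, and all field names are illustrative assumptions; the actual rules are applied inside the adapter before events are forwarded.

```python
from datetime import datetime

# Hypothetical raw event as delivered by the source system.
raw = {"temp": "44.2", "ts": "2024-07-08T19:01:20+02:00", "sensor": "netio-01"}

def apply_transformation_rules(event: dict) -> dict:
    """Conceptual sketch of three schema-editor rules: rename a runtime name,
    change a runtime type from string to float, and convert an incoming
    timestamp to a UNIX timestamp (milliseconds)."""
    out = dict(event)
    out["temperature"] = float(out.pop("temp"))    # rename + runtime type change
    dt = datetime.fromisoformat(out.pop("ts"))
    out["timestamp"] = int(dt.timestamp() * 1000)  # timestamp converter
    return out

print(apply_transformation_rules(raw))
```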
+Created adapter templates are available in the marketplace similar to standard adapters.
diff --git a/website-v2/versioned_docs/version-0.95.1/03_use-dashboard.md b/website-v2/versioned_docs/version-0.95.1/03_use-dashboard.md
new file mode 100644
index 000000000..339bf6b57
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/03_use-dashboard.md
@@ -0,0 +1,66 @@
+---
+id: use-dashboard
+title: Live Dashboard
+sidebar_label: Live Dashboard
+---
+
+The live dashboard can be used to visualize live data of data streams using a set of visualizations.
+The entry page of the live dashboard lists all created dashboards as in the screenshot below:
+
+StreamPipes Dashboard Overview
+
+## Visualizing Data Streams
+
+To visualize data streams in the live dashboard, a pipeline must be created that makes use of the so-called **Data Lake** sink.
+Any data stream or data processor can serve as an input of the data lake sink. Switch to the pipeline editor, create a pipeline and configure the data lake sink. The visualization name is used to identify the sink in case multiple data lake sinks are used within a single pipeline.
+
+## Managing Dashboards
+Multiple dashboards can be created, e.g., to organize different assets in a single dashboard view.
+
+A new dashboard can be created by clicking the _New Dashboard_ button, which opens a dialog that requires basic dashboard settings such as the title and description of the new dashboard.
+Once created, the dashboard will be shown in the overview. Here, the following dashboard actions are available:
+
+* **Show** opens the dashboard.
+* **Window** opens the dashboard in a new window with reduced controls, e.g., without the StreamPipes navigation and toolbar. This is a useful view for standalone displays that should visualize key parameters.
+* **Settings** allows to modify the basic dashboard settings.
+* **Edit** opens the dashboard in edit mode, where widgets can be added to the dashboard.
+* **Delete** deletes the selected dashboard.
+
+## Creating Visualizations
+
+Visualizations can be added to each dashboard in the form of widgets. To add new visualizations, switch to the dashboard in _Edit_ mode.
+In edit mode, a button appears that allows to add a new visualization.
+
+Adding a new visualization is supported by a wizard consisting of three steps:
+
+StreamPipes Dashboard Pipeline Selection
+
+* **Select pipeline** is the first step where a pipeline is selected on which the visualization is based. In this view, all pipelines are listed that have at least one **Data Lake** sink. In case a pipeline contains multiple data lake sinks, the visualization name is listed below the pipeline name, which eases discovery of the proper visualization.
+* **Select widget** is the next step where the visualization widget must be selected. StreamPipes automatically filters this list based on input requirements of widgets. For instance, image visualizations are only visible if the input data stream provides an image object.
+* **Configure widget** provides widget-specific settings to configure the visualization. In most cases, colors and titles of widgets can be modified. Additionally, chart-specific settings such as axis value ranges can be configured.
+
+StreamPipes Dashboard Widget Configuration
+
+By clicking _Create_, the new widget is placed on the canvas. Size and positioning of visualizations can be flexibly changed based on the provided grid.
+To change the widget configuration, the _Settings_ button of each widget can be clicked to re-open the configuration dialog.
+
+Once created, the dashboard provides a live view of all visualizations:
+
+StreamPipes Live Dashboard
+
+
+Before the dashboard is closed, make sure to click the _Save_ button to persist the updated dashboard. Changes can be discarded by clicking the _Discard_ button.
+
+
+## Available widgets
+
+The following visualizations are available in the latest release:
+
+* Area Chart
+* Gauge
+* HTML page (renders HTML markup)
+* Image
+* Line Chart
+* Raw (displays the raw JSON input for debugging purposes)
+* Single Value (displays a single measurement)
+* Table
+* Traffic Light
diff --git a/website-v2/versioned_docs/version-0.95.1/03_use-data-explorer.md b/website-v2/versioned_docs/version-0.95.1/03_use-data-explorer.md
new file mode 100644
index 000000000..f84323bf6
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/03_use-data-explorer.md
@@ -0,0 +1,102 @@
+---
+id: use-data-explorer
+title: Data Explorer
+sidebar_label: Data Explorer
+---
+
+The data explorer can be used to visualize and explore data streams that are persisted by using the **Data Lake** sink.
+
+StreamPipes Data Explorer Overview
+
+It provides a canvas (i.e. a data view) where various visualizations from multiple pipelines can be placed. For each data view, you can set a date and time range for the configured visualizations.
+
+## Using the data explorer
+
+### Get the data
+
+Any pipeline that uses the so-called **Data Lake** sink can be explored in the data explorer. Switch to the pipeline editor and add the data lake sink to a data processor or stream.
+The sink requires an index name as a configuration parameter, which is used as an identifier in the data explorer.
+
+### Data Views & Widgets
+
+After your data is stored in the data lake, you can switch over to the data explorer tab to create a new data view and the widgets of your choice. In StreamPipes, a data view organizes a set of related widgets (i.e. data visualizations or plots) and gets assigned a single date and time range. The default date and time range covers the last 15 minutes of the current date and time. You can select predefined ranges (e.g. day or month) or configure the exact date and time range you want to explore.
+
+StreamPipes Data Explorer Component
+
+First create and name your data view and select the edit icon to proceed. In your data view, you can now add a new widget configuration (plus icon) to configure and create your first widget. The widget configuration consists of three parts:
+
+* **Data**, where the individual data sources in the data lake are selected, the properties for the widget are chosen and filters on the data sources are defined and applied.
+* **Visualization**, where the type of widget is chosen and the respective configuration for the widget type is provided.
+* **Appearance**, where general style configurations for the widget (such as the background color) can be performed.
+
+### Data Configuration
+
+The data configuration is the first step to define your widget. You can add several data sources (i.e. data sinks) and need to configure each added data source individually. This gives you sufficient freedom to combine the needed information, potentially consisting of different data resolutions, filters or types of information.
+
+StreamPipes Data Explorer Data Configuration
+
+After selecting the initial data source, you can choose whether the underlying data query should be performed in raw, aggregated or single mode. Raw queries refer to using the data as-is, where you can define a limit on the number of events to guarantee performant usage in the application. In aggregated mode, you can choose among predefined aggregation granularities (e.g. day, minute, second).
+
+In the next step, you can choose the fields (i.e. properties of your data source) you are interested in exploring. If you selected aggregated or single mode, you can also modify the type of aggregation to be performed on the selected property.
+
+You can also filter your data source by adding conjunctive conditions.
+
+### Visualization Configuration
+
+The visualization configuration depends on the visualization type, which needs to be selected first. The data explorer currently supports the following types:
+
+#### Table
+
+The table view formats the selected properties in table format.
+
+StreamPipes Data Explorer Table
+
+#### Map
+
+The map allows to visualize and explore coordinates on the world map. The configuration requires choosing the property which contains the coordinates; additionally, the marker style, the zoom level and the tooltip content can be configured.
+
+StreamPipes Data Explorer Map
+
+#### Heatmap
+
+The heatmap widget visualizes data in terms of the available intensity, where higher values are interpreted as being more intense. You only need to select the property which you want to visualize. Note that it might be interesting to aggregate the data in the data configuration to get more insights into your heatmap.
+
+StreamPipes Data Explorer Heatmap
+
+#### Time Series
+
+The time series widget allows you to explore and analyze your numerical and boolean data properties. You can easily visualize your data properties in various styles (i.e. scatter, line, scattered line, bar or symbol) and colors, and configure a second y-axis for better interpretation of varying property ranges.
+
+StreamPipes Data Explorer Time Series 1
+
+StreamPipes Data Explorer Time Series 2
+
+StreamPipes Data Explorer Time Series 3
+
+#### Image
+
+The image widget enables you to integrate and visualize your image data.
+
+#### Indicator
+
+The indicator widget lets you visualize a single numerical value as well as (optionally) the delta to another indicator. You only need to configure the respective properties.
+
+StreamPipes Data Explorer Indicator
+
+#### 2D Correlation
+
+The correlation plot currently supports analyzing the relationship between two properties. Once selected, you can choose between a scatter view of the plotted data points or directly extract correlations in a density chart.
+
+StreamPipes Data Explorer Correlation 1
+
+StreamPipes Data Explorer Correlation 2
+
+#### Distribution
+
+In the distribution widget, you can quickly get an overview of your data range and common data values. You can either choose a histogram view, where a bar chart shows the frequency of automatically extracted data ranges, or a pie view, where you can also select the granularity of how your data is clustered in terms of frequency.
+
+StreamPipes Data Explorer Distribution 1
+
+StreamPipes Data Explorer Distribution 2
+
+### Appearance Configuration
+
+Finally, you can change the title of your created widget as well as background and text colors in the appearance configuration.
+
+StreamPipes Data Explorer Appearance
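+
+### Programmatic access
+
+Besides the widgets described above, data persisted by the **Data Lake** sink can also be retrieved programmatically via the StreamPipes REST API. The following sketch is illustrative only: the endpoint path and the `X-API-User`/`X-API-Key` header names are assumptions based on typical StreamPipes deployments, and the measurement name `flowrate` is a hypothetical example. API tokens can usually be generated in the profile settings of the UI.
+
+```bash
+# Hedged sketch: fetch the latest events of a hypothetical data lake
+# measurement called "flowrate" (adjust host, user, token and index name).
+curl -s \
+  -H "X-API-User: admin@streampipes.apache.org" \
+  -H "X-API-Key: <your-api-token>" \
+  "http://localhost/streampipes-backend/api/v4/datalake/measurements/flowrate?limit=100"
+```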
diff --git a/website-v2/versioned_docs/version-0.95.1/03_use-install-pipeline-elements.md b/website-v2/versioned_docs/version-0.95.1/03_use-install-pipeline-elements.md
new file mode 100644
index 000000000..852693200
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/03_use-install-pipeline-elements.md
@@ -0,0 +1,9 @@
+---
+id: use-install-pipeline-elements
+title: Install Pipeline Elements
+sidebar_label: Install Pipeline Elements
+---
+
+## Install Pipeline Elements
+
+(coming soon)
diff --git a/website-v2/versioned_docs/version-0.95.1/03_use-managing-pipelines.md b/website-v2/versioned_docs/version-0.95.1/03_use-managing-pipelines.md
new file mode 100644
index 000000000..2c64b53ee
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/03_use-managing-pipelines.md
@@ -0,0 +1,52 @@
+---
+id: use-managing-pipelines
+title: Managing Pipelines
+sidebar_label: Managing Pipelines
+---
+
+The pipeline view lists all created pipelines and provides several views and actions to manage the lifecycle of pipelines.
+
+In the entry screen, an overview of all created pipelines is shown:
+
+StreamPipes Pipeline Overview
+
+## Pipeline Actions
+Within the pipeline overview, several actions are available for each pipeline:
+* **Start/Stop pipeline** starts or stops the selected pipeline. Once clicked, StreamPipes will trigger the selected action for all pipeline elements and open a success or error dialog as illustrated below.
+* **Show details** opens the pipeline detail view (see below).
+* **Modify pipeline** opens the pipeline in the pipeline editor, where the pipeline can be modified. Note that this button is only visible if the pipeline is not running.
+* **Delete pipeline** opens a confirm dialog, which subsequently deletes the selected pipeline.
+
+The screenshot below shows the status of a pipeline after it has been successfully started. By clicking the _Show details_ button, more information on the status of each corresponding pipeline element microservice becomes available. In case of failures, the failure reason will be shown for each pipeline element that has failed to start.
+
+StreamPipes Pipeline Start Dialog
+
+## Organizing Pipelines into Categories
+Pipelines can be organized into categories, which is a useful feature in case a larger number of pipelines is created.
+All categories will be shown as separate tabs in the pipeline overview. The same pipeline can be assigned to multiple categories.
+
+To add a new category or to add a new pipeline to an existing category, click the _Manage Categories_ button and configure the category and assigned pipelines in the dialog.
+
+## Pipeline Details
+The pipeline details view can be opened by clicking the _Show details_ button in the pipeline overview panel.
+
+StreamPipes Pipeline Details
+
+### Overview
+The overview section displays the graphical structure of the pipeline and provides some statistics about recent pipeline actions. Additionally, pipelines can be directly started, stopped, modified and deleted within this view.
+
+### Monitoring
+Monitoring features will become available in version 0.68.0.
+
+### Errors
+Monitoring of failures and logs will become available in version 0.69.0.
+
+### QuickEdit
+The quick edit feature (only available for pipelines that are not running) is a quick and convenient way to modify some pipeline element configurations without opening the pipeline in the pipeline editor.
+To use the quick edit feature, switch to the _QuickEdit_ tab, which will display the selected pipeline.
+
+By clicking a pipeline element from the preview canvas, available configuration options of the selected pipeline element can be modified. Note that only modifications that do not affect the pipeline structure (e.g., a different output stream) can be made.
+
+StreamPipes Pipeline Quick Edit
+
+After a configuration value has been changed, make sure to click the _Update Pipeline_ button to save the changes.
diff --git a/website-v2/versioned_docs/version-0.95.1/03_use-notifications.md b/website-v2/versioned_docs/version-0.95.1/03_use-notifications.md
new file mode 100644
index 000000000..b5c64ed98
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/03_use-notifications.md
@@ -0,0 +1,25 @@
+---
+id: use-notifications
+title: Notifications
+sidebar_label: Notifications
+---
+
+The notification module can be used to create internal notifications.
+
+StreamPipes Notifications
+
+## Using notifications
+
+Any pipeline that includes the data sink **Notification** can trigger notifications that appear in the notification view. To configure a new notification, switch to the pipeline editor and append the notification sink to a data processor or data stream.
+The sink requires a title and message as configuration parameters.
+
+### Placeholders
+
+The notification message can include placeholders for fields, which are replaced with the actual value at runtime.
+
+## Managing notifications
+
+The notification view is split into two parts. The left side lists all pipelines which include a notification sink. By selecting a pipeline, available notifications will be shown in the right panel.
+By scrolling up, older notifications become visible. Notifications that have appeared in the detail view will be automatically marked as read, so that only new, unread notifications will appear in the left toolbar.
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/03_use-pipeline-editor.md b/website-v2/versioned_docs/version-0.95.1/03_use-pipeline-editor.md
new file mode 100644
index 000000000..f09cf8486
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/03_use-pipeline-editor.md
@@ -0,0 +1,62 @@
+---
+id: use-pipeline-editor
+title: Pipeline Editor
+sidebar_label: Pipeline Editor
+---
+
+The pipeline editor module supports building pipelines that transform a data stream using a set of reusable data processors and data sinks.
+After a new installation, the empty pipeline editor looks similar to the illustration below.
+
+StreamPipes Pipeline Editor Overview
+
+## Pipeline Elements
+The four main concepts (data sets, data streams, data processors and data sinks) are available at the top of the pipeline editor. By switching the tabs, the individual pipeline elements for each category can be found.
+By clicking the question mark symbol, which appears when hovering over an element, additional information can be viewed (e.g., a live preview of incoming data for data streams, and the documentation of the pipeline element for data processors and sinks).
+
+StreamPipes Pipeline Element Info
+
+## Creating Pipelines
+Pipelines are built by dragging data streams, processors and sinks into the pipeline assembly area. Typically, a pipeline is built step-by-step, starting with a data source (stream or set).
+Afterwards, data processors and sinks are added to the pipeline.
+Connections between pipeline elements are made by selecting the gray connector of the source and moving it to the target pipeline element.
+Once a connection is made, StreamPipes performs a quick validation step and, in case the two pipeline elements are compatible, automatically opens a configuration window.
+
+### Configuring Pipeline Elements
+The configuration depends on the selected pipeline element and looks similar to the screenshot below.
+In general, pipeline elements are configured by providing the required values. Once the pipeline element is fully configured, the _Save_ button activates and can be used to save the configuration for the pipeline element.
+
+StreamPipes Pipeline Element Configuration
+
+In addition, the following options are available in the pipeline element configuration menu:
+* **Show documentation** extends the view and displays the pipeline element's documentation next to the configuration view.
+* **Show only recommended settings** filters the list of available fields provided by the connected input data stream based on the _property scope_, e.g., so that only measurement values are displayed and dimension fields from the input stream are not available for selection. If deactivated, selections contain the full list of available fields that match the input requirement of the data processor.
+
+### Pipeline Element Options
+Further options for a pipeline element can be displayed by hovering over a pipeline element in the assembly area, so that additional buttons appear around the pipeline element:
+
+* **Configure element** re-opens the configuration view to update the pipeline element configuration (only available for data processors and sinks)
+* **Delete element** removes the pipeline element from the pipeline
+* **Help** opens the pipeline element's documentation
+* **Compatible element** opens a dialog which shows all pipeline elements that are compatible with the current element's output data stream. The dialog offers an alternative to selecting pipeline elements directly from the pipeline element selection at the top.
+* **Pipeline Element Recommendation** opens a dialog which shows all recommended pipeline elements that are compatible with the current element's output data stream. The recommendation is based on previously connected pipeline elements and is displayed below.
+
+### Pipeline Editor Options
+Several pipeline editor options are available in the menu bar of the pipeline assembly:
+
+StreamPipes Pipeline Editor Options
+
+* **Save pipeline** opens the save dialog (see below)
+* **Pan** allows to pan within the assembly area, useful for larger pipelines that do not fit in the screen
+* **Select** is visible if pan mode is active and switches back to the default select mode
+* **Zoom in/out** triggers the zoom in the pipeline assembly
+* **Auto Layout** lays out the pipeline in a much more beautiful way than you are able to do by yourself ;-)
+* **All pipeline modifications saved** is displayed if the current pipeline has been cached. Cache updates are triggered after every change of the pipeline so that changes are not lost after reloading the window.
+* **Hints** are shown to display current errors (e.g., incomplete pipelines). Details can be opened by clicking the hint button.
+* **Clear assembly** clears the assembly and removes the current pipeline.
+
+### Saving a pipeline
+To save a pipeline, press the _save pipeline_ button. A dialog pops up where a name and description of the pipeline can be entered (only the name is mandatory).
+Additionally, a pipeline can be directly started after it has been stored by activating the corresponding option in the save dialog.
+
+StreamPipes Save Pipeline Dialog
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/05_deploy-choosing-the-right-flavor.md b/website-v2/versioned_docs/version-0.95.1/05_deploy-choosing-the-right-flavor.md
new file mode 100644
index 000000000..a140bf46e
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/05_deploy-choosing-the-right-flavor.md
@@ -0,0 +1,47 @@
+---
+id: choosing-the-right-flavor
+title: Choosing the right flavor
+sidebar_label: Service selection options
+---
+
+
+## Introduction
+
+StreamPipes comes with many different options to customize a deployment. This section introduces the various options you can choose from when installing StreamPipes.
+
+You can choose between various **deployment modes**, and you can choose from two different core packages and several extension packages, which are described below.
+
+## Deployment Mode
+
+For the deployment mode, you can choose between a standard multi-container `Docker-Compose` installation and the `Kubernetes` installation.
+We provide several `Docker-Compose` files for the various options shown here and a `helm chart`.
+See [Docker Deployment](05_deploy-docker.md) and [Kubernetes Deployment](05_deploy-kubernetes.md) for more details.
+
+### Running StreamPipes in a non-containerized environment
+
+Of course, it is also possible to launch StreamPipes in a non-containerized environment.
+You will need to build your own executable binaries by running `mvn package`.
+In addition, it is required to install the required 3rd party services (see [Architecture](07_technicals-architecture.md)) and configure the environment variables as described in [Environment Variables](05_deploy-environment-variables.md).
+
+## Core Service
+
+We provide two different pre-packaged versions of core services. The default `streampipes-service-core` is a packaged JAR file which includes client libraries for the various messaging systems StreamPipes supports, at the cost of a larger file size.
+In case you plan to run StreamPipes on resource-constrained hardware, we recommend switching to the `streampipes-service-core-minimal` package, which only includes support for MQTT and NATS, but has a smaller file size and slightly improved startup performance.
+
+## Extension Services
+
+Similar to the core, we provide several pre-packaged extension services which differ mainly by their file size and the number of supported adapters, pipeline elements and messaging systems.
+
+The following packages exist:
+
+* `streampipes-extensions-all-jvm` is the largest package and includes all official StreamPipes adapters and pipeline elements. It also includes support for all messaging systems StreamPipes currently supports.
+* `streampipes-extensions-all-iiot` is a subset of the aforementioned package and excludes adapters and pipeline elements which are often not relevant for IIoT use cases. For instance, the package excludes text mining-related pipeline elements.
+* `streampipes-extensions-iiot-minimal` is a subset of the aforementioned package and includes only support for the lightweight messaging systems MQTT and NATS.
+
+Generally speaking, in cases where you plan to deploy StreamPipes on a resource-limited edge device, we recommend a combination of the `streampipes-service-core-minimal` and `streampipes-extensions-iiot-minimal` packages. This could, for instance, be a device with less than 4GB memory.
+In other cases, it depends on the use case and on whether you need all adapters and pipeline elements or are fine with the IIoT-related extensions.
+
+## Messaging System
+
+StreamPipes can be configured to use different messaging systems for exchanging events between adapters and pipeline elements.
+The section [Messaging](07_technicals-messaging.md) includes detailed information on the configuration of messaging systems.
diff --git a/website-v2/versioned_docs/version-0.95.1/05_deploy-docker.md b/website-v2/versioned_docs/version-0.95.1/05_deploy-docker.md
new file mode 100644
index 000000000..881e16b43
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/05_deploy-docker.md
@@ -0,0 +1,104 @@
+---
+id: deploy-docker
+title: Docker Deployment
+sidebar_label: Docker Deployment
+---
+
+StreamPipes Compose is a simple collection of user-friendly `docker-compose` files that lets you easily gain first-hand experience with Apache StreamPipes.
+
+> **NOTE**: We recommend using StreamPipes Compose only for initial try-out and testing. If you are a developer and
+> want to develop new pipeline elements or core features, use the [StreamPipes CLI](06_extend-cli.md).
+
+#### TL;DR: A one-liner to rule them all :-)
+
+```bash
+docker-compose up -d
+```
+Go to http://localhost to finish the installation in the browser. Once finished, switch to the pipeline editor and start the interactive tour or check the [online tour](https://streampipes.apache.org/docs/docs/user-guide-tour/) to learn how to create your first pipeline!
+
+## Prerequisites
+* Docker >= 17.06.0
+* Docker-Compose >= 1.17.0 (Compose file format: 3.4)
+* Google Chrome (recommended), Mozilla Firefox, Microsoft Edge
+
+Tested on: **macOS, Linux, Windows 10 upwards** (CMD, PowerShell, GitBash)
+
+**macOS** and **Windows** users can easily get Docker and Docker-Compose on their systems by installing **Docker for Mac/Windows** (recommended).
+
+> **NOTE**: We have intentionally disabled all port mappings except for HTTP port **80**, which is used to access the StreamPipes UI, in order to minimize the surface for conflicting ports.
+
+## Usage
+We provide several options to get you going:
+
+- **default**: Default docker-compose file, called `docker-compose.yml`.
+- **nats**: An installation which uses Nats as the message broker, called `docker-compose.nats.yml`.
+- **full**: Contains experimental Flink wrappers, called `docker-compose.full.yml`.
+- **quickstart**: Contains predefined example assets, called `docker-compose.quickstart.yml`. The Quickstart mode is a user-friendly feature which comes with predefined example assets like pipelines, dashboards, and data views. These ready-to-use components allow first-time users to get a feel for StreamPipes in IIoT with ease, serving as a practical demonstration of how StreamPipes can be utilized for efficient monitoring and analysis. We highly recommend first-time users to begin with the Quickstart mode to understand the simplicity and convenience that StreamPipes brings to the IIoT platform. Please follow the [User Guide for Quickstart Mode](user-guide-for-quickstart.md) if you want to explore it.
+
+
+:::info
+
+Other options include configurations for the internally used message broker. The current default is `Kafka`, but you can also start StreamPipes with `Nats`, `MQTT` or `Apache Pulsar`.
+Use one of the other provided docker-compose files.
+
+:::
+
+**Starting** the **default** option is as easy as simply running:
+> **NOTE**: Starting might take a while since `docker-compose up` also initially pulls all Docker images from Dockerhub.
+
+```bash
+docker-compose up -d
+# go to `http://localhost` after all services are started
+```
+After all containers have successfully started, just go to your browser and visit http://localhost to finish the installation. Once finished, switch to the pipeline editor and start the interactive tour or check the [online tour](https://streampipes.apache.org/docs/docs/user-guide-tour/) to learn how to create your first pipeline!
+
+**Stopping** the **default** option is similarly easy:
+```bash
+docker-compose down
+# if you want to remove mapped data volumes, run:
+# docker-compose down -v
+```
+
+Starting the **nats** option is almost the same, just specify the `docker-compose.nats.yml` file:
+```bash
+docker-compose -f docker-compose.nats.yml up -d
+# go to `http://localhost` after all services are started
+```
+**Stopping** the **nats** option:
+```bash
+docker-compose -f docker-compose.nats.yml down
+```
+
+
+Starting the **full** option is almost the same, just specify the `docker-compose.full.yml` file:
+```bash
+docker-compose -f docker-compose.full.yml up -d
+#go to `http://localhost` after all services are started
+```
+Stopping the **full** option:
+```bash
+docker-compose -f docker-compose.full.yml down
+#docker-compose -f docker-compose.full.yml down -v
+```
+Starting the **quickstart** option:
+```bash
+docker-compose -f docker-compose.quickstart.yml build script-runner
+docker-compose -f docker-compose.quickstart.yml up -d
+#go to `http://localhost` after all services are started
+```
+Stopping the **quickstart** option:
+```bash
+docker-compose -f docker-compose.quickstart.yml down
+```
+
+## Update services
+To actively pull the latest available Docker images use:
+```bash
+docker-compose pull
+```
+
+## Upgrade
+To upgrade to another StreamPipes version, simply edit the `SP_VERSION` in the `.env` file.
+```
+SP_VERSION=
+```
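+
+For example, a complete upgrade to the 0.95.1 release could look like the following sketch, assuming the default docker-compose file and an existing `.env` file:
+
+```bash
+# 1. set the desired version in the .env file, e.g. SP_VERSION=0.95.1
+# 2. pull the new images and recreate the containers
+docker-compose pull
+docker-compose up -d
+```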
diff --git a/website-v2/versioned_docs/version-0.95.1/05_deploy-environment-variables.md b/website-v2/versioned_docs/version-0.95.1/05_deploy-environment-variables.md
new file mode 100644
index 000000000..c4066fb70
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/05_deploy-environment-variables.md
@@ -0,0 +1,88 @@
+---
+id: deploy-environment-variables
+title: Environment Variables
+sidebar_label: Environment Variables
+---
+
+## Introduction
+
+A StreamPipes installation can be configured in many ways by providing environment variables.
+The following tables list the available environment variables along with a description.
+
+## StreamPipes Core Service
+
+### Internal
+
+| Env Variable Name | Default Value | Description |
+|---|---|---|
+| SP_DEBUG | false | Should only be set for local development to reroute traffic to localhost |
+| SP_INITIAL_ADMIN_EMAIL | admin@streampipes.apache.org | Installation-time variable for defining the default user name |
+| SP_INITIAL_ADMIN_PASSWORD | admin | Installation-time variable for defining the default user password |
+| SP_INITIAL_SERVICE_USER | sp-service-client | Installation-time variable for defining the initial service user (must be the same as the configured user in the extension service) |
+| SP_INITIAL_SERVICE_USER_SECRET | my-apache-streampipes-secret-key-change-me | Installation-time variable for defining the initial service secret (minimum 35 chars) |
+| SP_JWT_SECRET | Empty for Docker, Auto-generated for K8s | JWT secret, base64-encoded, minimum 256 bits |
+| SP_JWT_SIGNING_MODE | HMAC | HMAC or RSA, RSA can be used to authenticate Core-Extensions communication |
+| SP_JWT_PRIVATE_KEY_LOC | Empty | Required if SP_JWT_SIGNING_MODE=RSA, path to the private key, can be generated in the UI (Settings->Security->Generate Key Pair) |
+| SP_ENCRYPTION_PASSCODE | eGgemyGBoILAu3xckolp for Docker, Auto-generated for K8s | Encryption passcode for `SecretStaticProperties` |
+| SP_PRIORITIZED_PROTOCOL | kafka | Messaging layer for data exchange between extensions |
+
+
+### Third-party services
+
+| Env Variable Name | Default Value | Description |
+|---|---|---|
+| SP_COUCHDB_HOST | couchdb | The hostname or IP of the CouchDB database |
+| SP_COUCHDB_PROTOCOL | http | The protocol (http or https) of the CouchDB database |
+| SP_COUCHDB_PORT | 5984 | The port of the CouchDB database |
+| SP_COUCHDB_USER | admin | The user of the CouchDB database (must have permissions to add databases) |
+| SP_COUCHDB_PASSWORD | admin | The password of the CouchDB user |
+| SP_TS_STORAGE_HOST | influxdb | The hostname of the timeseries storage (currently InfluxDB) |
+| SP_TS_STORAGE_PORT | 8086 | The port of the timeseries storage |
+| SP_TS_STORAGE_PROTOCOL | http | The protocol of the timeseries storage (http or https) |
+| SP_TS_STORAGE_BUCKET | sp | The InfluxDB storage bucket name |
+| SP_TS_STORAGE_ORG | sp | The InfluxDB storage org |
+| SP_TS_STORAGE_TOKEN | sp-admin | The InfluxDB storage token |
+
+The InfluxDB itself can be configured by providing the variables `DOCKER_INFLUXDB_INIT_PASSWORD` and `DOCKER_INFLUXDB_INIT_ADMIN_TOKEN`. See the `docker-compose` file for details.
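+
+As an example, the core service could be pointed to a time-series storage running under a different hostname by overriding the variables above, e.g., in the shell environment used to start the service. This is a minimal sketch; all values below are placeholders, not defaults:
+
+```bash
+# placeholder values for a custom InfluxDB location (sketch)
+export SP_TS_STORAGE_HOST=my-influxdb.internal
+export SP_TS_STORAGE_PORT=8086
+export SP_TS_STORAGE_PROTOCOL=http
+export SP_TS_STORAGE_TOKEN=my-custom-token
+```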
+
+## StreamPipes Extensions Service
+
+### Internal
+
+| Env Variable Name | Default Value | Description |
+|---|---|---|
+| SP_CLIENT_USER | Empty | Service account for communication with Core |
+| SP_CLIENT_SECRET | Empty | Service secret for communication with Core |
+| SP_EXT_AUTH_MODE | Empty | When set to AUTH: all interfaces are only accessible with authentication (requires SP_JWT_PRIVATE_KEY_LOC in Core) |
+| SP_JWT_PUBLIC_KEY_LOC | Empty | Path to the public key of the corresponding SP_JWT_PRIVATE_KEY defined in Core |
+
+### Third-party services
+
+The following variables are only required for extensions which require access to the internal time-series storage (the `Data Lake Sink`).
+
+| Env Variable Name | Default Value | Description |
+|---|---|---|
+| SP_TS_STORAGE_HOST | influxdb | The hostname of the timeseries storage (currently InfluxDB) |
+| SP_TS_STORAGE_PORT | 8086 | The port of the timeseries storage |
+| SP_TS_STORAGE_PROTOCOL | http | The protocol of the timeseries storage (http or https) |
+| SP_TS_STORAGE_BUCKET | sp | The InfluxDB storage bucket name |
+| SP_TS_STORAGE_ORG | sp | The InfluxDB storage org |
+| SP_TS_STORAGE_TOKEN | sp-admin | The InfluxDB storage token |
+
+
+## Recommended variables
+
+For a standard deployment, it is recommended to customize the following variables:
+
+* Initial admin password (SP_INITIAL_ADMIN_PASSWORD, Core)
+* Initial service user secret (SP_INITIAL_SERVICE_USER_SECRET, Core)
+* Extensions client credentials (SP_CLIENT_USER / SP_CLIENT_SECRET, Extensions)
+* Encryption passcode (SP_ENCRYPTION_PASSCODE, Core)
+* CouchDB password (SP_COUCHDB_PASSWORD, Core + Extensions + CouchDB)
+* InfluxDB storage password (DOCKER_INFLUXDB_INIT_PASSWORD, InfluxDB)
+* InfluxDB storage token (SP_TS_STORAGE_TOKEN for Core and Extensions, DOCKER_INFLUXDB_INIT_ADMIN_TOKEN for the InfluxDB service)
+
+## Auto-generation of variables in K8s setups
+
+See the [Kubernetes Guide](05_deploy-kubernetes.md) for an overview of auto-generated variables.
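+
+Coming back to the recommended variables above, a minimal sketch of such a customization in an `.env` file read by `docker-compose` could look as follows (assuming the compose file passes these variables to the respective services; all values are placeholders to be replaced with your own secrets):
+
+```bash
+SP_INITIAL_ADMIN_PASSWORD=change-me-strong-password
+SP_INITIAL_SERVICE_USER_SECRET=change-me-random-string-with-at-least-35-chars
+SP_ENCRYPTION_PASSCODE=change-me-random-passcode
+SP_COUCHDB_PASSWORD=change-me-couchdb-password
+DOCKER_INFLUXDB_INIT_PASSWORD=change-me-influxdb-password
+# the storage token must be identical for StreamPipes and InfluxDB
+SP_TS_STORAGE_TOKEN=change-me-influxdb-token
+DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=change-me-influxdb-token
+```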
diff --git a/website-v2/versioned_docs/version-0.95.1/05_deploy-kubernetes.md b/website-v2/versioned_docs/version-0.95.1/05_deploy-kubernetes.md
new file mode 100644
index 000000000..6734676b4
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/05_deploy-kubernetes.md
@@ -0,0 +1,269 @@
+---
+id: deploy-kubernetes
+title: Kubernetes Deployment
+sidebar_label: Kubernetes Deployment
+---
+
+## Prerequisites
+Requires Helm (https://helm.sh/) and an actively running Kubernetes cluster.
+
+## Usage
+We provide helm chart options to get you going in the `installer/k8s` folder.
+
+**Starting** the default helm chart option is as easy as simply running the following command from the root of this folder:
+> **NOTE**: Starting might take a while since we also initially pull all Docker images from Dockerhub.
+
+```bash
+helm install streampipes ./
+```
+After a while, all containers should have started successfully, indicated by the `Running` status.
+
+The `values.yaml` file contains several configuration options to customize your StreamPipes installation. See the section below for all configuration options.
+
+## Ingress
+
+The helm chart provides several options to configure an Ingress or to define an Ingressroute that directly integrates with Traefik.
+
+## Dynamic Volume Provisioning
+
+You can override the `storageClassName` variable to configure StreamPipes for dynamic volume provisioning.
+
+## Parameters
+
+Here is an overview of the supported parameters to configure StreamPipes.
+
+### Common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| deployment | Deployment type (lite or full) | lite |
+| preferredBroker | Preferred broker for deployment | "nats" |
+| monitoringSystem | Enable monitoring system (true/false) | false |
+| pullPolicy | Image pull policy | "Always" |
+| restartPolicy | Restart policy for the container | Always |
+| persistentVolumeReclaimPolicy | Reclaim policy for persistent volumes | "Delete" |
+| persistentVolumeAccessModes | Access mode for persistent volumes | "ReadWriteOnce" |
+| initialDelaySeconds | Initial delay for liveness and readiness probes | 60 |
+| periodSeconds | Interval between liveness and readiness probes | 30 |
+| failureThreshold | Number of consecutive failures for readiness probes | 30 |
+| hostPath | Host path for the application | "" |
+
+### StreamPipes common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| streampipes.version | StreamPipes version | "0.93.0-SNAPSHOT" |
+| streampipes.registry | StreamPipes registry URL | "apachestreampipes" |
+| streampipes.auth.secretName | The secret name for storing secrets | "sp-secrets" |
+| streampipes.auth.users.admin.user | The initial admin user | "admin@streampipes.apache.org" |
+| streampipes.auth.users.admin.password | The initial admin password (leave empty for autogen) | "admin" |
+| streampipes.auth.users.service.user | The initial service account user | "sp-service-client" |
+| streampipes.auth.users.service.secret | The initial service account secret | empty (auto-generated) |
+| streampipes.auth.encryption.passcode | Passcode for value encryption | empty (auto-generated) |
+| streampipes.core.appName | StreamPipes backend application name | "backend" |
+| streampipes.core.port | StreamPipes backend port | 8030 |
+| streampipes.core.persistence.storageClassName | Storage class name for backend PVs | "hostpath" |
+| streampipes.core.persistence.storageSize | Size of the backend PV | "1Gi" |
+| streampipes.core.persistence.claimName | Name of the backend PersistentVolumeClaim | "backend-pvc" |
+| streampipes.core.persistence.pvName | Name of the backend PersistentVolume | "backend-pv" |
+| streampipes.core.service.name | Name of the backend service | "backend" |
+| streampipes.core.service.port | TargetPort of the StreamPipes backend service | 8030 |
+| streampipes.ui.appName | StreamPipes UI application name | "ui" |
+| streampipes.ui.resolverActive | Flag for enabling DNS resolver for Nginx proxy | true |
+| streampipes.ui.port | StreamPipes UI port | 8088 |
+| streampipes.ui.resolver | DNS resolver for Nginx proxy | "kube-dns.kube-system.svc.cluster.local" |
+| streampipes.ui.service.name | Name of the UI service | "ui" |
+| streampipes.ui.service.type | Type of the UI service | "ClusterIP" |
+| streampipes.ui.service.nodePort | Node port for the UI service | 8088 |
+| streampipes.ui.service.port | TargetPort of the StreamPipes UI service | 8088 |
+| streampipes.ingress.active | Flag for enabling Ingress for StreamPipes | false |
+| streampipes.ingress.annotations | Annotations for Ingress | {} |
+| streampipes.ingress.host | Hostname for Ingress | "" |
+| streampipes.ingressroute.active | Flag for enabling IngressRoute for StreamPipes | true |
+| streampipes.ingressroute.annotations | Annotations for IngressRoute | {} |
+| streampipes.ingressroute.entryPoints | Entry points for IngressRoute | ["web", "websecure"] |
+| streampipes.ingressroute.host | Hostname for IngressRoute | "" |
+| streampipes.ingressroute.certResolverActive | Flag for enabling certificate resolver for IngressRoute | true |
+| streampipes.ingressroute.certResolver | Certificate resolver for IngressRoute | "" |
+
+
+### Extensions common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| extensions.iiot.appName | IIoT extensions application name | extensions-all-iiot |
+| extensions.iiot.port | Port for the IIoT extensions application | 8090 |
+| extensions.iiot.service.name | Name of the IIoT extensions service | extensions-all-iiot |
+| extensions.iiot.service.port | TargetPort of the IIoT extensions service | 8090 |
+
+
+### External common parameters
+
+#### Couchdb common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| external.couchdb.appName | CouchDB application name | "couchdb" |
+| external.couchdb.version | CouchDB version | 3.3.1 |
+| external.couchdb.user | CouchDB admin username | "admin" |
+| external.couchdb.password | CouchDB admin password | empty (auto-generated) |
+| external.couchdb.port | Port for the CouchDB service | 5984 |
+| external.couchdb.service.name | Name of the CouchDB service | "couchdb" |
+| external.couchdb.service.port | TargetPort of the CouchDB service | 5984 |
+| external.couchdb.persistence.storageClassName | Storage class name for CouchDB PVs | "hostpath" |
+| external.couchdb.persistence.storageSize | Size of the CouchDB PV | "1Gi" |
+| external.couchdb.persistence.claimName | Name of the CouchDB PersistentVolumeClaim | "couchdb-pvc" |
+| external.couchdb.persistence.pvName | Name of the CouchDB PersistentVolume | "couchdb-pv" |
+
+#### Influxdb common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| external.influxdb.appName | InfluxDB application name | "influxdb" |
+| external.influxdb.version | InfluxDB version | 2.6 |
+| external.influxdb.username | InfluxDB admin username | "admin" |
+| external.influxdb.password | InfluxDB admin password | empty (auto-generated) |
+| external.influxdb.adminToken | InfluxDB admin token | empty (auto-generated) |
+| external.influxdb.initOrg | InfluxDB initial organization | "sp" |
+| external.influxdb.initBucket | InfluxDB initial bucket | "sp" |
+| external.influxdb.initMode | InfluxDB initialization mode | "setup" |
+| external.influxdb.apiPort | Port number for the InfluxDB service (API) | 8083 |
+| external.influxdb.httpPort | Port number for the InfluxDB service (HTTP) | 8086 |
+| external.influxdb.grpcPort | Port number for the InfluxDB service (gRPC) | 8090 |
+| external.influxdb.service.name | Name of the InfluxDB service | "influxdb" |
+| external.influxdb.service.apiPort | TargetPort of the InfluxDB service for API | 8083 |
+| external.influxdb.service.httpPort | TargetPort of the InfluxDB service for HTTP | 8086 |
+| external.influxdb.service.grpcPort | TargetPort of the InfluxDB service for gRPC | 8090 |
+| external.influxdb.persistence.storageClassName | Storage class name for InfluxDB PVs | "hostpath" |
+| external.influxdb.persistence.storageSize | Size of the InfluxDB PV | "1Gi" |
+| external.influxdb.persistence.storageSizeV1 | Size of the InfluxDB PV for v1 databases | "1Gi" |
+| external.influxdb.persistence.claimName | Name of the InfluxDBv2 PersistentVolumeClaim | "influxdb2-pvc" |
+| external.influxdb.persistence.claimNameV1 | Name of the InfluxDBv1 PersistentVolumeClaim | "influxdb-pvc" |
+| external.influxdb.persistence.pvName | Name of the InfluxDBv2 PersistentVolume | "influxdb2-pv" |
+| external.influxdb.persistence.pvNameV1 | Name of the InfluxDBv1 PersistentVolume | "influxdb-pv" |
+
+
+#### Nats common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| external.nats.appName | NATS application name | "nats" |
+| external.nats.port | Port for the NATS service | 4222 |
+| external.nats.version | NATS version | |
+| external.nats.service.type | Type of the NATS service | "NodePort" |
+| external.nats.service.externalTrafficPolicy | External traffic policy for the NATS service | "Local" |
+| external.nats.service.name | Name of the NATS service | "nats" |
+| external.nats.service.port | TargetPort of the NATS service | 4222 |
+
+
+#### Kafka common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| external.kafka.appName | Kafka application name | "kafka" |
+| external.kafka.version | Kafka version | 2.2.0 |
+| external.kafka.port | Port for the Kafka service | 9092 |
+| external.kafka.external.hostname | Hostname which will be advertised to external clients, which connect via the external listener on (default) port 9094 | "localhost" |
+| external.kafka.service.name | Name of the Kafka service | "kafka" |
+| external.kafka.service.port | TargetPort of the Kafka service | 9092 |
+| external.kafka.service.portOutside | Port for Kafka clients outside of the cluster | 9094 |
+| external.kafka.persistence.storageClassName | Storage class name for Kafka PVs | "hostpath" |
+| external.kafka.persistence.storageSize | Size of the Kafka PV | "1Gi" |
+| external.kafka.persistence.claimName | Name of the Kafka PersistentVolumeClaim | "kafka-pvc" |
+| external.kafka.persistence.pvName | Name of the Kafka PersistentVolume | "kafka-pv" |
+
+#### Zookeeper common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| external.zookeeper.appName | ZooKeeper application name | "zookeeper" |
+| external.zookeeper.version | ZooKeeper version | 3.4.13 |
+| external.zookeeper.port | Port for the ZooKeeper service | 2181 |
+| external.zookeeper.service.name | Name of the ZooKeeper service | "zookeeper" |
+| external.zookeeper.service.port | TargetPort of the ZooKeeper service | 2181 |
+| external.zookeeper.persistence.storageClassName | Storage class name for ZooKeeper PVs | "hostpath" |
+| external.zookeeper.persistence.storageSize | Size of the ZooKeeper PV | "1Gi" |
+| external.zookeeper.persistence.claimName | Name of the ZooKeeper PersistentVolumeClaim | "zookeeper-pvc" |
+| external.zookeeper.persistence.pvName | Name of the ZooKeeper PersistentVolume | "zookeeper-pv" |
+
+
+#### Pulsar common parameters
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| external.pulsar.appName | Pulsar application name | "pulsar" |
+| external.pulsar.version | Pulsar version | 3.0.0 |
+| external.pulsar.port | Port for the Pulsar service | 6650 |
+| external.pulsar.service.name | Name of the Pulsar service | "pulsar" |
+| external.pulsar.service.port | TargetPort of the Pulsar service | 6650 |
+| external.pulsar.persistence.storageClassName | Storage class name for Pulsar PVs | "hostpath" |
+| external.pulsar.persistence.storageSize | Size of the Pulsar PV | "1Gi" |
+| external.pulsar.persistence.claimName | Name of the Pulsar PersistentVolumeClaim | "pulsar-pvc" |
+| external.pulsar.persistence.pvName | Name of the Pulsar PersistentVolume | "pulsar-pv" |
+
+### Monitoring common parameters
+
+#### Monitoring - Prometheus
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| prometheus.appName | Prometheus application name | "prometheus" |
+| prometheus.version | Prometheus version | 2.45.0 |
+| prometheus.port | Prometheus port | 9090 |
+| prometheus.service.name | Prometheus service name | "prometheus" |
+| prometheus.service.port | Prometheus service port | 9090 |
+| prometheus.persistence.storageClassName | Prometheus storage class name | "hostpath" |
+| prometheus.persistence.storageSize | Prometheus storage size | "2Gi" |
+| prometheus.persistence.claimName | Prometheus PVC claim name | "prometheus-pvc" |
+| prometheus.persistence.pvName | Prometheus PV name | "prometheus-pv" |
+| prometheus.persistence.tokenStorageSize | Prometheus token storage size | "16Ki" |
+| prometheus.config.scrapeInterval | Prometheus scrape interval | 10s |
+| prometheus.config.evaluationInterval | Prometheus evaluation interval | 15s |
+| prometheus.config.backendJobName | Prometheus backend job name | "backend" |
+| prometheus.config.extensionsName | Prometheus extensions job name | "extensions-all-iiot" |
+| prometheus.config.tokenFileName | Prometheus token file name | "token" |
+| prometheus.config.tokenFileDir | Prometheus token file directory | "/opt/data" |
+
+#### Monitoring - Grafana
+
+| Parameter Name | Description | Value |
+|---|---|---|
+| grafana.appName | Grafana application name | "grafana" |
+| grafana.version | Grafana version | 10.1.2 |
+| grafana.port | Grafana port | 3000 |
+| grafana.service.name | Grafana service name | "grafana" |
+| grafana.service.port | Grafana service port | 3000 |
+| grafana.persistence.storageClassName | Grafana storage class name | "hostpath" |
+| grafana.persistence.storageSize | Grafana storage size | "1Gi" |
+| grafana.persistence.claimName | Grafana PVC claim name | "grafana-pvc" |
+| grafana.persistence.pvName | Grafana PV name | "grafana-pv" |
+
+
+## Auto-generation of parameters
+
+The helm chart includes a `secrets.yaml` file which auto-generates several settings as follows:
+
+```yaml
+
+apiVersion: v1
+kind: Secret
+metadata:
+  name: sp-secrets
+  namespace: {{ .Release.Namespace | quote }}
+type: Opaque
+data:
+  sp-initial-admin-password: {{ ternary (randAlphaNum 10) .Values.streampipes.auth.users.admin.password (empty .Values.streampipes.auth.users.admin.password) | b64enc | quote }}
+  sp-initial-client-secret: {{ ternary (randAlphaNum 35) .Values.streampipes.auth.users.service.secret (empty .Values.streampipes.auth.users.service.secret) | b64enc | quote }}
+  sp-encryption-passcode: {{ ternary (randAlphaNum 20) .Values.streampipes.auth.encryption.passcode (empty .Values.streampipes.auth.encryption.passcode) | b64enc | quote }}
+  sp-couchdb-password: {{ ternary (randAlphaNum 20) .Values.external.couchdb.password (empty .Values.external.couchdb.password) | b64enc | quote }}
+  sp-ts-storage-password: {{ ternary (randAlphaNum 20) .Values.external.influxdb.password (empty .Values.external.influxdb.password) | b64enc | quote }}
+  sp-ts-storage-token: {{ ternary (randAlphaNum 20) .Values.external.influxdb.adminToken (empty .Values.external.influxdb.adminToken) | b64enc | quote }}
+
+```
+
+
+## Deleting the current helm chart deployment:
+```bash
+helm uninstall streampipes
+```
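+
+As a closing example, the parameters listed above can also be overridden at install time via `--set` instead of editing `values.yaml`. A sketch, with parameter paths taken from the tables above and example values only:
+
+```bash
+helm install streampipes ./ \
+  --set preferredBroker=kafka \
+  --set streampipes.auth.users.admin.password=my-secret-password \
+  --set external.couchdb.password=my-couchdb-password
+```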
diff --git a/website-v2/versioned_docs/version-0.95.1/05_deploy-security.md b/website-v2/versioned_docs/version-0.95.1/05_deploy-security.md
new file mode 100644
index 000000000..cae5bdcbf
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/05_deploy-security.md
@@ -0,0 +1,75 @@
+---
+id: deploy-security
+title: Security
+sidebar_label: Security
+---
+
+## Overriding default settings
+
+At installation time, StreamPipes checks for available environment variables relevant for securing the system. If they are not set, it will use the default values.
+
+The following variables are checked by the core at installation time:
+
+* SP_INITIAL_ADMIN_EMAIL The email address of the initial administrator.
+* SP_INITIAL_ADMIN_PASSWORD The password of the initial administrator.
+* SP_INITIAL_CLIENT_USER The initial client user, used by the extensions modules to make authenticated API requests to the core.
+* SP_INITIAL_CLIENT_SECRET The default password of the initial client user.
+* SP_SETUP_INSTALL_PIPELINE_ELEMENTS Indicates whether pipeline elements should be installed.
+* SP_ENCRYPTION_PASSCODE The encryption passcode, used for securely storing secrets (e.g., database connection strings).
+* SP_JWT_SECRET The JWT secret, used for signing JWT tokens.
+
+In addition, all extensions services that perform requests to the core will need to have the following environment variables set:
+
+* SP_CLIENT_USER The client user, used by the extensions modules to make authenticated API requests to the core.
+* SP_CLIENT_SECRET The password of the client user.
+
+Note that there are default values for all environment variables that are set at installation time - make sure to change these settings when moving to production!
+
+## Configuration
+
+Most security-related settings can be set in the configuration section of StreamPipes. The *General* section allows to set self-service registration and password recovery (both are disabled by default and require a valid email configuration).
+In the *Security* section, users, service accounts, roles and groups can be configured.
+
+
+## User types
+
+StreamPipes distinguishes between User Accounts (real users that interact with StreamPipes over the UI or an API) and Service Accounts (user-independent accounts which solely use StreamPipes over the API).
+
+Service accounts are typically used by extension services that require API access to the core (e.g., to get a list of running pipelines).
+
+## Permissions
+
+StreamPipes v0.69.0 comes with more advanced mechanisms to manage permissions.
+For each major resource (pipeline elements, pipelines, StreamPipes Connect adapters, dashboards, data explorer views), permissions can be assigned individually to users and groups.
+
+To ease permission handling, StreamPipes comes with a default number of roles with pre-assigned privileges:
+
+### Roles
+
+* Admin The administrator role has full access to all resources.
+* Service Admin The service administrator role has full access to all resources, but has no access to the UI.
+* Pipeline Admin has full control of pipelines (create, edit, delete, start, stop, pause, resume, etc.).
+* Pipeline User has limited control of pipelines (read only).
+* Dashboard Admin has full control of dashboards (create, edit, delete, etc.).
+* Dashboard User has limited control of dashboards (read only).
+* Data Explorer Admin has full control of data explorer views (create, edit, delete, etc.).
+* Data Explorer User has limited control of data explorer views (read only).
+* Connect Admin has full control of StreamPipes Connect adapters (create, edit, delete, etc.).
+
+### Groups
+
+Roles can be assigned either to specific users or to groups. Any group can contain several members.
+The permissions of a user are the union of the permissions of all roles assigned to the user and to the groups the user belongs to.
+
+### Changing permissions
+
+Any resource has a resource owner, which is the authority that created the resource. Resources can be either public or private. Public resources are available to all users, while the user role determines what the user can do with the resource.
+E.g., a public pipeline created by a user of role ROLE_ADMIN can be edited by all users with role PIPELINE_ADMIN, while the same pipeline can be read by all users with role PIPELINE_USER.
+
+Permissions can currently only be changed by admin users.
+In the overview section of each resource (e.g., pipelines and dashboards), a permission dialog is available to users with role ROLE_ADMIN. The dialog allows to assign users and groups to the individual resource.
+
+
+
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/05_deploy-use-ssl.md b/website-v2/versioned_docs/version-0.95.1/05_deploy-use-ssl.md
new file mode 100644
index 000000000..d5762b8dc
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/05_deploy-use-ssl.md
@@ -0,0 +1,35 @@
+---
+id: deploy-use-ssl
+title: Use SSL
+sidebar_label: Use SSL
+---
+
+This page explains how SSL certificates can be used to provide transport layer security between your browser and the StreamPipes backend.
+
+## Prerequisites
+You need a valid certificate consisting of a private and a public key. Both keys must be in PEM format. Please note that your private key should never be shared, otherwise the communication cannot be considered secure.
+
+## Edit docker-compose.yml
+In order to use SSL, you have to open port 443 on the nginx service. Incoming insecure traffic on port 80 will automatically be rerouted to port 443.
+
+The environment variable NGINX_SSL must be set to "true".
+
+Finally, you have to inject the certificates into the Docker container. In the example below, the certificates are placed in the directory /etc/ssl/private/ on the host machine. Please change the path according to the place where the certificates are located on your machine. The path after the colon should not be changed!
+```yaml
+[...]
+  nginx:
+    image: apachestreampipes/ui
+    ports:
+      - "80:80"
+      - "443:443"
+    environment:
+      - NGINX_SSL=true
+    volumes:
+      - /etc/ssl/private/private.pem:/etc/nginx/ssl/ssl.pem
+      - /etc/ssl/private/public.pem:/etc/nginx/ssl/cert.pem
+    depends_on:
+      - backend
+    networks:
+      spnet:
+[...]
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-archetypes.md b/website-v2/versioned_docs/version-0.95.1/06_extend-archetypes.md
new file mode 100644
index 000000000..a6907f0ee
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-archetypes.md
@@ -0,0 +1,46 @@
+---
+id: extend-archetypes
+title: Maven Archetypes
+sidebar_label: Maven Archetypes
+---
+
+In this tutorial, we explain how you can use the Maven archetypes to develop your own StreamPipes processors and sinks.
+We use IntelliJ in this tutorial, but it works with any IDE of your choice.
+
+## Prerequisites
+You need to have Maven installed; furthermore, you need an up-and-running StreamPipes installation on your development computer.
+
+## Create Project
+To create a new project, we provide multiple Maven archetypes.
+Currently, we provide archetypes for standalone Java-based microservices and archetypes for the experimental Flink wrapper.
+The commands required to create a new pipeline element project can be found below. Make sure that you select a version compatible with your StreamPipes installation.
+Copy the command into your terminal to create a new project.
+The project will be created in the current folder.
+First, the ``groupId`` of the resulting Maven artifact must be set.
+We use ``groupId``: ``org.example`` and ``artifactId``: ``ExampleProcessor``.
+You can keep the default values for the other settings, confirm them by hitting enter.
+
+:::info Choosing the right version
+Make sure that the version used to create your archetype matches your running Apache StreamPipes version.
+In the example below, replace `{sp.version}` with the proper version, e.g., `0.92.0`.
+:::
+
+```bash
+mvn archetype:generate \
+  -DarchetypeGroupId=org.apache.streampipes \
+  -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
+  -DarchetypeVersion={sp.version}
+```
+
+
+## Project structure
+Open the project in your IDE.
+If everything worked, the structure should look similar to the following image.
+The *main* package defines which processors / sinks you want to activate, and the *pe.example* package contains two skeletons for creating a data processor and a sink.
+For details, have a look at the other parts of the Developer Guide, where these classes are explained in more depth.
+
+Project Structure
+
+## Next steps
+
+Click [here](06_extend-first-processor.md) to learn how to create your first data processor.
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-cli.md b/website-v2/versioned_docs/version-0.95.1/06_extend-cli.md
new file mode 100644
index 000000000..e5f93cfd6
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-cli.md
@@ -0,0 +1,190 @@
+---
+id: extend-cli
+title: StreamPipes CLI
+sidebar_label: StreamPipes CLI
+---
+
+The StreamPipes command-line interface (CLI) targets developers and provides an easy entry point to set up a suitable dev environment, whether you plan on developing
+
+* new extensions such as **connect adapters, processors, sinks** or,
+* new core features for **backend** and **ui**.
+
+The main difference from the standard Docker/K8s installation is improved communication between services running as containers and services running locally for development.
+
+The CLI can be found in the [main repository](https://github.com/apache/streampipes/tree/master/installer/cli) or in the ``installer/cli`` folder of the downloaded source code.
+
+## TL;DR
+
+```bash
+streampipes env --list
+[INFO] Available StreamPipes environment templates:
+pipeline-element
+...
+streampipes env --set pipeline-element
+streampipes up -d
+```
+> **NOTE**: use `./installer/cli/streampipes` if you haven't added it to the PATH and sourced it (see section "Run `streampipes` from anywhere?").
+
+## Prerequisites
+The CLI is basically a wrapper around multiple `docker` and `docker-compose` commands plus some additional sugar.
+
+* Docker >= 17.06.0
+* Docker-Compose >= 1.26.0 (Compose file format: 3.4)
+* Google Chrome (recommended), Mozilla Firefox, Microsoft Edge
+* For Windows developers: GitBash only
+
+
+Tested on: **macOS**, **Linux**, **Windows**
+
+> **NOTE**: If you're using Windows, the CLI only works in combination with GitBash - CMD and PowerShell won't work.
+
+
+## CLI commands overview
+
+```
+StreamPipes CLI - Manage your StreamPipes environment with ease
+
+Usage: streampipes COMMAND [OPTIONS]
+
+Options:
+  --help, -h      show help
+  --version, -v   show version
+
+Commands:
+  clean      Remove StreamPipes data volumes, dangling images and network
+  down       Stop and remove StreamPipes containers
+  env        Inspect and select StreamPipes environments
+  info       Get information
+  logs       Get container logs for specific container
+  ps         List all StreamPipes container for running environment
+  pull       Download latest images from Dockerhub
+  restart    Restart StreamPipes environment
+  up         Create and start StreamPipes container environment
+
+Run 'streampipes COMMAND --help' for more info on a command.
+```
+
+## Usage: Along dev life-cycle
+
+**List** available environment templates.
+```bash
+streampipes env --list
+```
+
+**Inspect** services in an available environment to know what kind of services it is composed of.
+```bash
+streampipes env --inspect pipeline-element
+```
+
+**Set** environment, e.g. `pipeline-element`, if you want to write a new pipeline element.
+```bash
+streampipes env --set pipeline-element
+```
+
+**Start** environment (default: `dev` mode). Here the service definition in the selected environment is used to start the multi-container landscape.
+> **NOTE**: `dev` mode is enabled by default since we rely on open ports to core services such as `couchdb`, `kafka`, etc. to be reachable from the IDE when developing. If you don't want to map ports (except the UI port), then use the `--no-ports` flag.
+
+```bash
+streampipes up -d
+# start in production mode with unmapped ports
+# streampipes up -d --no-ports
+```
+Now you're good to go to write your new pipeline element :tada: :tada: :tada:
+
+> **HINT for extensions**: Use our [Maven archetypes](https://streampipes.apache.org/docs/docs/extend-archetypes/) to set up a project skeleton and use your IDE of choice for development. However, we do recommend using IntelliJ.
+
+> **HINT for core**: To work on `backend` or `ui` features you need to set the template to `backend` and clone the core repository [streampipes](https://github.com/apache/streampipes) - check the prerequisites there for more information.
+
+**Stop** environment and remove Docker containers
+```bash
+streampipes down
+# want to also clean docker data volumes when stopping the environment?
+# streampipes down -v
+```
+
+## Additional useful commands
+
+**Start individual services only?** We got you! You've chosen a template that suits your needs and now you only want to start individual services from it, e.g. only Kafka and InfluxDB.
+
+> **NOTE**: the service names need to be present and match your current `.spenv` environment.
+
+```bash
+streampipes up -d kafka influxdb
+```
+
+**Get current environment** (if previously set using `streampipes env --set <environment>`).
+```bash
+streampipes env
+```
+
+**Get logs** of a specific service and use the optional `--follow` flag to stay attached to the logs.
+```bash
+streampipes logs --follow backend
+```
+
+**Update** all services of the current environment
+```bash
+streampipes pull
+```
+
+**Restart** all services of the current environment or specific services
+```bash
+streampipes restart
+# restart backend
+# streampipes restart backend
+```
+
+**Clean** your system and remove created StreamPipes Docker volumes, the StreamPipes Docker network and dangling StreamPipes images of old image layers.
+```bash
+streampipes clean
+# remove volumes, network and dangling images
+# streampipes clean --volumes
+```
+
+## Modify/Create an environment template
+As of now, this step has to be done **manually**. All environments are located in `environments/`.
+
+```bash
+├── adapter            # developing a new connect adapter
+├── backend            # developing core backend features
+├── basic              # wanna run core, UI, connect etc from the IDE?
+├── full               # full version containing more pipeline elements
+├── lite               # few pipeline elements, less memory
+├── pipeline-element   # developing new pipeline-elements
+└── ui                 # developing UI features
+```
+**Modifying an existing environment template**. To modify an existing template, you can simply add a `<service-name>` to the template.
+> **NOTE**: You need to make sure that the service you are adding exists in `deploy/standalone/service/`.
If you're adding a completely new service, take a look at existing ones, create a new service directory and include a `docker-compose.yml` and `docker-compose.dev.yml` file.
+
+```
+[environment:backend]
+activemq
+kafka
+...
+
+```
+
+**Creating a new** environment template. To create a new environment template, place a new file `environments/<template-name>` in the template directory. Open the file and use the following schema.
+> **IMPORTANT**: Please make sure to have the `[environment:<template-name>]` header in the first line of your new template matching the name of the file. Make sure to use lowercase letters only.
+
+```
+[environment:<template-name>]
+<service-name>
+<service-name>
+...
+```
+
+## Run `streampipes` from anywhere? No problem
+Simply add the path to this CLI directory to your `$PATH` variable (on macOS, Linux), e.g. in your `.bashrc` or `.zshrc`, or to `%PATH%` (on Windows).
+
+For **macOS** or **Linux**:
+
+```bash
+export PATH="/path/to/streampipes-installer/installer/cli:$PATH"
+```
+
+For **Windows** add `installer\cli` to environment variables, e.g. check this [documentation](https://helpdeskgeek.com/windows-10/add-windows-path-environment-variable/).
+
+
+## Upgrade to new version
+To upgrade to a new version, simply edit the version tag `SP_VERSION` in the `.env` file.
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-client.md b/website-v2/versioned_docs/version-0.95.1/06_extend-client.md
new file mode 100644
index 000000000..f584c3d2c
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-client.md
@@ -0,0 +1,204 @@
+---
+id: extend-client
+title: StreamPipes Client
+sidebar_label: StreamPipes Client
+---
+
+
+:::info Looking for Python support?
+
+This section explains how to use the Apache StreamPipes Java Client. Please read the Python docs to find out how to use
+the client for Python.
+
+:::
+
+## About the StreamPipes client
+
+Sometimes you don't want to write your own extensions to StreamPipes, but want to interact with StreamPipes from an
+external application.
+One example is to influence the lifecycle of pipelines - think of a feature which automatically starts or stops specific
+pipelines that monitor the production of a specific product.
+
+Another example is to gather live data from Apache StreamPipes, e.g., to consume data that has been previously connected
+by an external, standalone application.
+
+For such use cases, we provide the StreamPipes client, which is currently available in Python and Java. This section
+covers the usage of the Java client.
+
+## Using the StreamPipes client
+
+:::info Choosing the right version
+
+Your client library version should match the installed Apache StreamPipes version. Replace `${streampipes.version}` with
+the version of your installation, e.g., `0.92.0`.
+
+:::
+
+In your Java project, add the following dependency to your pom file:
+
+```xml
+<dependency>
+    <groupId>org.apache.streampipes</groupId>
+    <artifactId>streampipes-client</artifactId>
+    <version>${streampipes.version}</version>
+</dependency>
+```
+
+## Obtaining an API token
+
+Overview StreamPipes Architecture
+
+To communicate with Apache StreamPipes, you need to provide proper credentials. There are two ways to obtain
+credentials:
+
+* An API token, which is bound to a user. The API token can be generated from the UI by clicking on the user icon and then
+  navigating to `Profile/API`.
+* A service user, which can be created by users with role `Admin`.
+
+Service users can have their own permissions, while API tokens inherit all permissions from the corresponding user.
+
+## Connecting to StreamPipes
+
+Once you have your API token and configured your dependencies, you can connect to an Apache StreamPipes instance as
+follows:
+
+```java
+CredentialsProvider credentials = StreamPipesCredentials
+    .withApiKey("admin@streampipes.apache.org", "YOUR_API_KEY");
+
+// Create an instance of the StreamPipes client
+StreamPipesClient client = StreamPipesClient
+    .create("localhost", 8082, credentials, true);
+```
+
+The following configurations are required:
+
+* The `withApiKey` method expects the username and the API key. Alternatively, use the `withServiceToken` method to
+  authenticate as a service user.
+* The client instance requires the hostname or IP address of your running StreamPipes instance. In addition, you need to
+  provide the port, the credentials object and a flag which needs to be set in case the StreamPipes instance is not
+  served over HTTPS.
+* There are short-hand convenience options to create a client instance.
+
+## Working with the client
+
+Here are some examples of how you can work with the StreamPipes client:
+
+```java
+// Get streams
+List<SpDataStream> streams = client.streams().all();
+
+// Get a specific stream
+Optional<SpDataStream> stream = client.streams().get("STREAM_ID");
+
+// see the schema of a data stream
+EventSchema schema = stream.get().getEventSchema();
+
+// print the list of fields of this stream
+List<EventProperty> fields = schema.getEventProperties();
+
+// Get all pipelines
+List<Pipeline> pipelines = client.pipelines().all();
+
+// Start a pipeline
+PipelineOperationStatus status = client.pipelines().start(pipelines.get(0));
+
+// Stop a pipeline by providing a pipeline id
+PipelineOperationStatus stopStatus = client.pipelines().stop("PIPELINE_ID");
+
+// Get all pipeline element templates
+List<PipelineElementTemplate> templates = client.pipelineElementTemplates().all();
+
+// Get all data sinks
+List<DataSinkInvocation> dataSinks = client.sinks().all();
+```
+
+## Consuming live data
+
+StreamPipes supports a variety of messaging protocols to internally handle data streams. If you plan to gather live data
+from the client library, you also need to add one or more of the supported messaging
+protocols to the pom file. The default protocol depends on the StreamPipes configuration and is set in the `.env` file
+in your installation folder.
+
+```xml
+<!-- Kafka -->
+<dependency>
+    <groupId>org.apache.streampipes</groupId>
+    <artifactId>streampipes-messaging-kafka</artifactId>
+    <version>${streampipes.version}</version>
+</dependency>
+
+<!-- Nats -->
+<dependency>
+    <groupId>org.apache.streampipes</groupId>
+    <artifactId>streampipes-messaging-nats</artifactId>
+    <version>${streampipes.version}</version>
+</dependency>
+
+<!-- MQTT -->
+<dependency>
+    <groupId>org.apache.streampipes</groupId>
+    <artifactId>streampipes-messaging-mqtt</artifactId>
+    <version>${streampipes.version}</version>
+</dependency>
+```
+
+In addition, add the message format that is used internally by StreamPipes. The default message format used by
+StreamPipes is JSON, so let's include the dependency as well:
+
+```xml
+<dependency>
+    <groupId>org.apache.streampipes</groupId>
+    <artifactId>streampipes-dataformat-json</artifactId>
+    <version>${streampipes.version}</version>
+</dependency>
+```
+
+Once you've imported the dependencies, it is easy to consume live data.
First, register the protocols and formats in
+your client instance:
+
+```java
+client.registerProtocol(new SpKafkaProtocolFactory());
+
+// or Nats:
+client.registerProtocol(new SpNatsProtocolFactory());
+
+// data format:
+client.registerDataFormat(new JsonDataFormatFactory());
+```
+
+Then, you are ready to consume data:
+
+```java
+client.streams().subscribe(dataStreams.get(0), new EventProcessor() {
+    @Override
+    public void onEvent(Event event) {
+        // example
+        MapUtils.debugPrint(System.out, "event", event.getRaw());
+    }
+});
+```
+
+:::tip
+
+There are many more options to work with the StreamPipes Client - e.g., you can trigger emails directly from the API.
+Just explore the various classes and interfaces provided by the client!
+
+:::
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-customize-ui.md b/website-v2/versioned_docs/version-0.95.1/06_extend-customize-ui.md
new file mode 100644
index 000000000..c09823ffb
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-customize-ui.md
@@ -0,0 +1,226 @@
+---
+id: extend-customize-ui
+title: UI customization
+sidebar_label: UI customization
+---
+
+
+## Custom theme
+
+It is possible to use a custom theme with individual styles, logos and images instead of the default StreamPipes theme.
+
+In this section, we describe the necessary steps to build and deploy a custom theme.
+
+
+### Prerequisite: Learn how to run and build the UI
+
+To use a custom theme, it is required to build the UI with the custom settings.
+In general, the UI can be found in the `ui` folder of the source code.
+
+Perform the following steps to build the UI:
+
+```bash
+
+# Install all necessary packages
+npm install
+
+# Start the UI for development purposes
+npm run start
+
+# Build the StreamPipes UI
+npm run build
+
+```
+
+## Customizable assets
+
+The following assets can be provided in a customized theme:
+
+* **Logo** This is the main logo image, which is shown e.g., on the login page.
+* **Navigation Logo** This is the logo which appears in the top navigation bar after successful login.
+* **Favicon** The favicon is shown in the browser navbar. It is also used as the loading animation in StreamPipes.
+* **String constants** Customizable strings, e.g., when you want to use another application name than **Apache StreamPipes**.
+* **Theme variables** An SCSS file which defines custom colors and layouts.
+
+## Customize constants
+
+To customize constants, you can create a custom file `app.constants.ts` and modify the content based on the template below:
+
+```javascript
+
+import {Injectable} from '@angular/core';
+
+@Injectable()
+export class AppConstants {
+
+    public readonly APP_NAME = "Apache StreamPipes";
+    public readonly APP_TITLE = 'Apache StreamPipes';
+    public readonly EMAIL = "admin@streampipes.apache.org";
+}
+
+
+```
+
+## Customize theme
+
+To customize the theme, we provide a file named `variables.scss` in which default color and style settings can be overridden.
+
+See the example below:
+
+```scss
+
+/*!
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +$sp-color-primary: rgb(57, 181, 74); +$sp-color-primary-600: #06c12a; + +$sp-color-accent: #1b1464; + +$sp-color-accent-light-blue: rgb(59, 92, 149); +$sp-color-accent-light: rgb(156, 156, 156); +$sp-color-accent-light-transparent: rgba(156, 156, 156, 0.4); + +$sp-color-accent-dark: #83a3de; + +$sp-color-adapter: #7f007f; +$sp-color-stream: #ffeb3b; +$sp-color-processor: #009688; +$sp-color-sink: #3f51b5; + +$sp-color-error: #b71c1c; + +body { + --color-data-view: rgb(122, 206, 227); + --color-dashboard: rgb(76, 115, 164); + --color-adapter: rgb(182, 140, 97); + --color-data-source: #ffeb3b; + --color-pipeline: rgb(102, 185, 114); + --color-measurement: rgb(39, 164, 155); + --color-file: rgb(163, 98, 190); + + --button-border-radius: 5px; + --iconbar-width: 35px; + --navbar-icon-border-radius: 0; + --navbar-icon-padding: 0; +} + +:root { + --color-loading-bar: #{$sp-color-accent}; +} + +.dark-mode { + --color-primary: #{$sp-color-primary}; + --color-accent: #{$sp-color-accent-dark}; + --color-bg-outer: var(--color-bg-1); + --color-bg-page-container: var(--color-bg-0); + --color-bg-main-panel-header: var(--color-bg-0); + --color-bg-main-panel-content: var(--color-bg-0); + --color-bg-navbar-icon: inherit; + --color-bg-navbar-icon-selected: inherit; + --color-bg-0: #121212; + --color-bg-1: #282828; + --color-bg-2: #404040; + --color-bg-3: #424242; + --color-bg-4: #5f5f5f; + --color-bg-dialog: rgb(66, 66, 66); + --color-shadow: #c4c4c4; + --color-pe: #404040; + --color-default-text: rgba(255, 255, 255, 0.87); + --color-warn: #b36161; + + --color-tab-border: #cccccc; + + --color-navigation-bg: var(--color-primary); + --color-navigation-link-text: var(--color-bg-0); + --color-navigation-text: #121212; + --color-navigation-selected: #{$sp-color-primary}; + --color-navigation-hover: #{$sp-color-primary-600}; + --color-navigation-bg-selected: var(--color-bg-1); + --color-navigation-divider: #{$sp-color-primary}; + + --content-box-color: #404040; + --canvas-color: linear-gradient( + 90deg, + rgba(50, 50, 50, 0.5) 10%, + transparent 0% + ), + linear-gradient(rgba(50, 50, 50, 0.5) 10%, transparent 0%); +} + +.light-mode { + --color-primary: #{$sp-color-primary}; + --color-accent: #{$sp-color-accent}; + --color-bg-outer: var(--color-bg-1); + --color-bg-page-container: var(--color-bg-0); + --color-bg-main-panel-header: var(--color-bg-0); + --color-bg-main-panel-content: var(--color-bg-0); + --color-bg-navbar-icon: inherit; + --color-bg-navbar-icon-selected: inherit; + --color-bg-0: #ffffff; + --color-bg-1: #fafafa; + --color-bg-2: #f1f1f1; + --color-bg-3: rgb(224, 224, 224); + --color-bg-4: rgb(212, 212, 212); + --color-bg-dialog: #ffffff; + --color-shadow: #555; + --color-pe: #ffffff; + --color-default-text: #121212; + --color-warn: #b71c1c; + + --color-tab-border: #cccccc; + + --color-navigation-bg: var(--color-primary); + --color-navigation-link-text: var(--color-bg-0); + --color-navigation-text: #ffffff; + --color-navigation-selected: #{$sp-color-primary}; + --color-navigation-hover: #{$sp-color-primary-600}; + --color-navigation-bg-selected: var(--color-bg-1); + 
--color-navigation-divider: var(--color-primary);
+
+    --content-box-color: rgb(156, 156, 156);
+    --canvas-color: linear-gradient(
+            90deg,
+            rgba(208, 208, 208, 0.5) 10%,
+            transparent 0%
+        ),
+        linear-gradient(rgba(208, 208, 208, 0.5) 10%, transparent 0%);
+}
+
+```
+## Run a customized build
+
+To create a new UI build with customized themes, use the following command:
+
+````bash
+
+UI_LOC=PATH_TO_FOLDER_WITH_CUSTOM_TEMPLATES \
+THEME_LOC=$UI_LOC/_variables.scss \
+LOGO_HEADER_LOC=$UI_LOC/img/logo.png \
+FAVICON_LOC=$UI_LOC/img/favicon.png \
+LOGO_NAV_LOC=$UI_LOC/img/logo-navigation.png \
+CONSTANTS_FILE=$UI_LOC/app.constants.ts \
+npm run build
+
+````
+
+First, we create a helper environment variable that links to a folder which includes custom logos, the theme file and constants.
+Next, we set the variables above to override the default logos and stylings.
+Finally, the usual build process is executed.
+
+Once finished, you've successfully customized an Apache StreamPipes instance!
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-first-processor.md b/website-v2/versioned_docs/version-0.95.1/06_extend-first-processor.md
new file mode 100644
index 000000000..96080508f
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-first-processor.md
@@ -0,0 +1,54 @@
+---
+id: extend-first-processor
+title: Your first data processor
+sidebar_label: Your first data processor
+---
+
+In this section, we will explain how to start a pipeline element service and install it using the StreamPipes UI.
+
+Open the class *ExampleDataProcessor* and edit the ``onEvent`` method to log the incoming event to the console and send it to the next component without changing it.
+
+```java
+@Override
+public void onEvent(Event event, SpOutputCollector collector) {
+    // Print the incoming event on the console
+    System.out.println(event);
+
+    // Hand the incoming event to the output collector without changing it.
+    collector.collect(event);
+}
+```
+
+## Start Processor
+Starting from StreamPipes 0.69.0, the IP address of an extensions service (processor, adapter or sink) will be auto-discovered upon start.
+The auto-discovery is done by the StreamPipes service discovery mechanism and should work for most setups.
+Once you start an extensions service, you will see the chosen IP printed in the console. Make sure that this IP does not point to localhost (127.0.0.1).
+If you see such an IP or the extensions service complains that it cannot resolve the IP, you can manually set the IP address of the extensions service. You can do so by providing an SP_HOST environment variable.
+
+
+Project Structure
+
+To check if the service is up and running, open the browser on *'localhost:8090'* (or the port defined in the service definition). The machine-readable description of the processor should be visible as shown below.
+
+
+:::caution Common Problems
+If the service description is not shown on 'localhost:8090', you might have to change the port address.
+This needs to be done in the configuration of your service, further explained in the configurations part of the developer guide.
+
+If the service does not show up in the StreamPipes installation menu, click on 'MANAGE ENDPOINTS' and add 'http://YOUR_IP_OR_DNS_NAME:8090'.
+Use the IP or DNS name you provided as the SP_HOST variable or the IP (if resolvable) found by the auto-discovery service printed in the console.
+After adding the endpoint, a new processor with the name *Example* should show up.
+
+:::
+
+Now you can go to StreamPipes.
+Your new processor *'Example'* should now show up in the installation menu ("Install Pipeline Elements" in the left navigation bar).
+Install it, then switch to the pipeline view and create a simple pipeline that makes use of your newly created processor.
+In case you opened the StreamPipes installation for the first time, it should have been automatically installed during the setup process.
+
+Project Structure
+
+Start this pipeline.
+Now you should see logging messages in your console and, once you've created a visualization, you can also see the resulting events of your component in StreamPipes.
+
+Congratulations, you have just created your first processor!
+From here on you can start experimenting and implement your own algorithms.
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-event-model.md b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-event-model.md
new file mode 100644
index 000000000..42cc8d472
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-event-model.md
@@ -0,0 +1,141 @@
+---
+id: extend-sdk-event-model
+title: "SDK Guide: Event Model"
+sidebar_label: "SDK: Event Model"
+---
+
+## Introduction
+
+This guide explains the usage of the event model to manipulate runtime events for data processors and data sinks.
+
+## Prerequisites
+
+This guide assumes that you are already familiar with the basic setup of [data processors](06_extend-first-processor.md).
+
+### Property Selectors
+
+In most cases, fields that are subject to be transformed by pipeline elements are provided by the assigned ``MappingProperty`` (see the guide on [static properties](extend-sdk-static-properties)).
+
+Mapping properties return a ``PropertySelector`` that identifies a field based on (i) the **streamIndex** and (ii) the runtime name of the field.
+Let's assume we have an event with the following structure:
+
+```json
+{
+  "timestamp" : 1234556,
+  "temperature" : 37.0,
+  "deviceId" : "sensor1",
+  "running" : true,
+  "location" : {"latitude" : 34.4, "longitude" : -47},
+  "lastValues" : [45, 22, 21]
+}
+```
+
+In addition, we assume that a data processor exists (with one input node) that converts the temperature value (measured in degrees Celsius) to a Fahrenheit value.
+In this case, a mapping property (selected by the pipeline developer in the StreamPipes UI) would link to the ``temperature`` field of the event.
+
+The mapping property value will be the ``PropertySelector`` of the temperature value, which looks as follows:
+
+```
+s0::temperature
+```
+
+``s0`` identifies the stream (in this case, only one input stream exists, but as data processors might require more than one input stream, a stream identifier is required), while the suffix identifies the runtime name.
+
+Note: If you add a new field to an input event, you don't need to provide the selector, you can just assign the runtime name as defined by the [output strategy](extend-sdk-output-strategies).
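+
+For illustration, here is a minimal sketch of how such a selector is typically obtained from the parameter extractor in ``onInvocation`` (the internal id ``temperature-mapping`` is an assumption made for this example):
+
+```java
+// assuming a unary mapping property was declared with the internal id "temperature-mapping"
+String temperatureSelector = extractor.mappingPropertyValue("temperature-mapping");
+// temperatureSelector now holds a selector such as "s0::temperature"
+```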
+
+### Reading Fields
+
+You can get a field from an event by providing the corresponding selector:
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+    PrimitiveField temperatureField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsPrimitive();
+  }
+
+```
+
+Similarly, if your mapping property links to a nested property, use
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+    NestedField nestedField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsNested();
+  }
+
+```
+
+and for a list-based field:
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+    ListField listField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsList();
+  }
+
+```
+
+### Parsing Fields
+
+#### Primitive Fields
+
+A ``PrimitiveField`` contains convenience methods to directly cast a field to the target datatype:
+
+```java
+
+// parse the value as a float datatype
+Float temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsFloat();
+
+// or do the same with a double datatype
+Double temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsDouble();
+
+// extracting a string
+String deviceId = event.getFieldBySelector(deviceIdSelector).getAsPrimitive().getAsString();
+
+// this also works for extracting fields from nested fields:
+Double latitude = event.getFieldBySelector(latitudeSelector).getAsPrimitive().getAsDouble();
+
+// extracting boolean values
+Boolean running = event.getFieldBySelector(runningSelector).getAsPrimitive().getAsBoolean();
+```
+
+In rare cases, you might want to receive a field directly based on the runtime name as follows:
+
+```java
+Double temperature = event.getFieldByRuntimeName("temperature").getAsPrimitive().getAsDouble();
+```
+
+#### List Fields
+
+Lists can also be retrieved by providing the corresponding selector and can automatically be parsed to a list of primitive datatypes:
+
+```java
+
+List<Integer> lastValues = event.getFieldBySelector(lastValueSelector).getAsList().parseAsSimpleType(Integer.class);
+
+```
+
+(coming soon: parsing complex lists)
+
+
+### Adding/Updating Fields
+
+Primitive fields can easily be added to an event by providing the runtime name and the object:
+
+```java
+
+  // add a primitive field with runtime name "city" and value "Karlsruhe"
+  event.addField("city", "Karlsruhe");
+
+  // remove the field "temperature" from the event
+  event.removeFieldBySelector(temperatureSelector);
+
+  // add a new field
+  event.addField("fahrenheit", 48);
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-functions.md b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-functions.md
new file mode 100644
index 000000000..659690b71
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-functions.md
@@ -0,0 +1,127 @@
+---
+id: extend-sdk-functions
+title: "SDK Guide: Functions"
+sidebar_label: "SDK: Functions"
+---
+
+## Introduction
+
+Pipeline elements such as data processors and data sinks are a great way
+to create _reusable_ components that can be part of pipelines.
+However, creating a pipeline element is not always the best choice:
+
+* The behaviour of a data processor is bound to a specific input stream _and_
+* A data processor doesn't contain any user-defined configuration _and_
+* The intended action is fixed or known at build time and the data processor shouldn't be available in the pipeline editor.
+
+To cover such use cases, we provide _StreamPipes Functions_. Functions
+are a great way to define custom processing logic based on previously
+connected data streams.
+
+Functions can be registered in a similar way to pipeline elements, but define expected input
+streams at startup time. Functions are started once the corresponding _extensions service_ starts
+and run until the service is stopped.
+
+## Writing a function
+
+:::caution Work in Progress
+
+Functions are currently in preview mode and are not yet recommended for production usage.
+APIs are subject to change in a future version.
+
+:::
+
+
+To define a function, create a new extensions service using the [Maven Archetypes](06_extend-archetypes.md) or use an already existing service.
+
+### Skeleton
+
+Functions can be defined by creating a new class which extends the ``StreamPipesFunction`` class.
+
+The basic skeleton looks like this:
+
+```java
+public class StreamPipesFunctionExample extends StreamPipesFunction {
+
+  @Override
+  public FunctionId getFunctionId() {
+    return FunctionId.from("my-function-id", 1);
+  }
+
+  @Override
+  public List<String> requiredStreamIds() {
+    return List.of("<stream-id>");
+  }
+
+  @Override
+  public void onServiceStarted(FunctionContext context) {
+    // called when the service is started
+  }
+
+  @Override
+  public void onEvent(Event event, String streamId) {
+    // called when an event arrives
+  }
+
+  @Override
+  public void onServiceStopped() {
+    // called when the service is stopped
+  }
+}
+
+```
+
+The structure of a function class is easy to understand:
+* _getFunctionId_ requires an identifier in the form of a ``FunctionId``, which defines the id itself along with a version number that can be freely chosen.
+* _requiredStreamIds_ expects a list of references to data streams that are already available in StreamPipes. See below to learn how to find the id of a stream in StreamPipes.
+* _onServiceStarted_ is called once the extensions service is started and can be used to initialize the function.
+* _onEvent_ is called every time a new event arrives and provides a ``streamId`` as a reference to the corresponding stream, which is useful in case multiple data streams are received by the function.
+* _onServiceStopped_ is called when the extensions service is stopped and can be used to perform any required cleanup.
+
+### Getting a stream ID
+
+Functions require a reference to all data streams that should be retrieved by the function.
+Currently, the only way to get the ID of a stream is by navigating to the ``Asset Management`` view in the StreamPipes UI.
+Create a new asset, click on ``Edit Asset`` and open ``Add Link`` in the _Linked Resources_ panel.
+Choose ``Data Source`` as link type, select one of the available sources, copy the ``Resource ID`` and provide this ID in the ``requiredStreamIds`` method.
+
+### Function Context
+
+The ``onServiceStarted`` method provides a function context which provides several convenience methods to work with functions:
+
+* _getFunctionId_ returns the current function identifier
+* _getConfig_ returns a reference to configuration options of the extensions service
+* _getClient_ returns a reference to the StreamPipes client to interact with features from the REST API.
+* _getStreams_ returns the data model of all data streams defined in the ``requiredStreamIds`` method.
+* _getSchema_ returns the schema of a specific data stream by providing the ``streamId``
+
+
+## Registering a function
+
+Registering a function is easy and can be done in the _Init_ class of the service.
+
+E.g., considering a service definition as illustrated below, simply call ``registerFunction`` and
+provide an instance of your function.
+
+```java
+
+  @Override
+  public SpServiceDefinition provideServiceDefinition() {
+    return SpServiceDefinitionBuilder.create("my-service-id",
+        "StreamPipes Function Example",
+        "",
+        8090)
+        .registerFunction(new MyExampleFunction())
+        .registerMessagingFormats(
+            new JsonDataFormatFactory())
+        .registerMessagingProtocols(
+            new SpNatsProtocolFactory())
+        .build();
+  }
+
+```
+
+## Metrics & Monitoring
+
+Similar to pipeline elements, functions register at the StreamPipes core.
+Running functions can be seen in the pipeline view of the user interface under _Functions_, right below the list of available pipelines.
+Similar to pipelines, simple metrics, monitoring info and exceptions can be viewed in the _Details_ section of each function.
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-migration-sd.md b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-migration-sd.md
new file mode 100644
index 000000000..1fd5200ad
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-migration-sd.md
@@ -0,0 +1,117 @@
+---
+id: extend-sdk-migration-service-discovery
+title: "Migration Guide: New Service Discovery in 0.69.0"
+sidebar_label: "Migration Guide: 0.69.0"
+---
+
+
+## Introduction
+As part of our roadmap towards a release 1.0, Apache StreamPipes 0.69.0 features a new service discovery approach along with performance improvements related to a new storage layer for pipeline element descriptions.
+
+The new service discovery approach is better suited for cloud-native scenarios, as the hostname of a pipeline element is now decoupled from its description. As such, StreamPipes now supports recovery of pipeline elements independent from their assigned host.
+In addition, the new approach simplifies development of StreamPipes, e.g., in cases where the core is running in Docker and pipeline elements are developed on a local machine. In this case, the IP of the host machine should now be auto-discovered so that provision of environment variables should now be obsolete in most cases.
+The second large improvement is related to the replacement of RDF4J as the storage engine with a NoSQL database. This leads to much faster load times (you will notice this especially at system startup).
+
+While we are heavily working towards a stable release 1.0, we decided to put our efforts into the remaining features required for 1.0 and do not provide an auto-migration related to some breaking changes.
+Therefore, we recommend reinstalling StreamPipes when updating to 0.69.0. We currently plan to have at most two more releases before releasing the first 1.x version of StreamPipes.
+
+## Installation
+* Before upgrading to 0.69.0, clean any existing installation (e.g., by running ``docker-compose down -v``) and make sure that no volumes of StreamPipes are left.
+* Upgrade to the latest installer version (can be found at [streampipes/installer](https://github.com/apache/streampipes/tree/dev/installer))
+* Upon restart, make sure that the setup dialog appears (make sure that the new StreamPipes logo appears) and re-initialize the system.
+
+## SDK changes
+
+0.69.0 comes with a new ``ServiceDefinitionBuilder``, which simplifies the definition of a pipeline element service.
+
+The ServiceDefinitionBuilder requires an ID of your extensions service, an optional title and description and a default port.
+It is best to provide 8090 as the default port, so that this will be the standard port of all StreamPipes extensions services at deployment time in a containerized environment.
+The port can always be overridden by providing an ``SP_PORT`` environment variable.
+
+### Init class
+
+Modify the Init class of your pipeline element service as follows:
+
+```java
+public class ExamplesInit extends StandaloneModelSubmitter {
+
+  public static void main(String[] args) {
+    new ExamplesInit().init();
+  }
+
+  @Override
+  public SpServiceDefinition provideServiceDefinition() {
+    return SpServiceDefinitionBuilder.create("org.apache.streampipes.processors.examples.jvm",
+        "StreamPipes Code Examples",
+        "",
+        8090)
+        .registerMessagingProtocols(new SpKafkaProtocolFactory(), new SpJmsProtocolFactory())
+        .registerMessagingFormats(new JsonDataFormatFactory())
+        .registerPipelineElement(new MyPipelineElementController())
+        .registerAdapter(new MyAdapter())
+        .build();
+  }
+}
+```
+
+You can now easily define a StreamPipes extensions service that supports both custom adapters and pipeline elements by using the following Maven dependency.
+This is optional; no changes to your existing Maven dependencies (except the version, e.g., 0.69.0-SNAPSHOT) are required.
+
+```maven
+<dependency>
+    <groupId>org.apache.streampipes</groupId>
+    <artifactId>streampipes-container-extensions</artifactId>
+</dependency>
+```
+
+
+### Configs
+Prior to version 0.69.0, additional configs had to be provided in a separate ``Config`` class. This is now obsolete - configs can be directly provided within the builder class as follows:
+
+```java
+
+  @Override
+  public SpServiceDefinition provideServiceDefinition() {
+    return SpServiceDefinitionBuilder.create("org.apache.streampipes.processors.examples.jvm",
+        "StreamPipes Code Examples",
+        "",
+        8090)
+        .registerPipelineElement(new MyPipelineElement())
+        .registerAdapter(new MyAdapter())
+        .addConfig("key", 1)
+        .addConfig("my-string-config", "myvalue")
+        .build();
+  }
+```
+
+Configs can be easily accessed from the ``EventProcessorRuntimeContext`` (or ``EventSinkRuntimeContext``):
+
+```java
+@Override
+  public void onInvocation(Parameters params,
+                           SpOutputCollector spOutputCollector,
+                           EventProcessorRuntimeContext context) {
+
+    Integer myConfigValue = context.getConfigStore().getConfig().getInteger("key");
+  }
+```
+
+
+### Service Discovery
+An extensions service can be started by executing the Init class.
+StreamPipes will now automatically select the proper service IP address and register the service at the backend.
+You can inspect the selected IP address in the console:
+
+```
+2024-05-16T11:03:37.158+02:00  INFO --- [           main] o.a.s.commons.networking.Networking      : Using auto-discovered IP: 192.168.178.22
+2024-05-16T11:03:37.158+02:00  INFO --- [           main] o.a.s.commons.networking.Networking      : Using port from provided environment variable SP_PORT: 7023
+2024-05-16T11:03:37.372+02:00  INFO --- [           main] a.s.s.e.StreamPipesExtensionsServiceBase : Registering service org.apache.streampipes.extensions.all.jvm with id org.apache.streampipes.extensions.all.jvm-FUt84Y at core
+2024-05-16T11:03:37.814+02:00  INFO --- [           main] o.a.s.s.extensions.CoreRequestSubmitter  : Successfully registered service at core.
+2024-05-16T11:03:37.814+02:00  INFO --- [           main] a.s.s.e.StreamPipesExtensionsServiceBase : Registering 1 service configs for service org.apache.streampipes.extensions.all.jvm
+```
+
+In some (rare) cases, a non-resolvable IP will be selected. In this case, you can manually override the IP by providing a ``SP_HOST`` environment variable.
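+For instance, the variable can be passed when starting the service - a minimal sketch, where the jar name is a placeholder and not part of StreamPipes:
+
+```bash
+# pin the externally reachable host and port used for service registration
+SP_HOST=192.168.178.22 SP_PORT=8090 java -jar my-extensions-service.jar
+```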
+This falls back to a similar behaviour as in pre-0.69.0 versions and will use the manually provided IP.
+
+
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-migrations.md b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-migrations.md
new file mode 100644
index 000000000..bf822c1b8
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-migrations.md
@@ -0,0 +1,179 @@
+---
+id: extend-sdk-migration
+title: "SDK Guide: Pipeline Element Migration"
+sidebar_label: "SDK: PE Migration"
+---
+
+Pipeline element migrations allow you to automatically update and migrate existing pipeline elements when a new
+version of StreamPipes is installed. This means that whenever you upgrade StreamPipes, all existing and future
+pipeline elements will be directly compatible with the new version without any manual interaction. Pipeline elements
+include adapters, data processors, and data sinks.
+
+:::info
+Migrations will make their debut in StreamPipes version `0.93.0` and will be an integral part of the system going
+forward.
+However, it's important to note that this feature is not available in any of the previous versions of StreamPipes. To
+take full advantage of migrations and their benefits, it is recommended to upgrade to version `0.93.0` or later. This
+will
+ensure that you have access to the latest enhancements and maintain compatibility with the evolving StreamPipes
+platform.
+:::
+
+## Define Migrations
+
+Whenever a pipeline element, be it an adapter, data processor, or data sink, undergoes changes that result in
+modifications to its configuration options, developers must additionally create a migration procedure. This migration
+process should be capable of smoothly transitioning all affected instances from the previous version to the new one.
+The migration itself is automatically managed and executed by StreamPipes. Developers are only responsible for two key
+aspects:
+
+* **Implementing the concrete migration**: Developers need to craft the specific migration logic that facilitates the
+  seamless transition of configuration options.
+* **Registering the migration**: Developers should register their migration procedures at the extensions service,
+  allowing StreamPipes to identify and apply the necessary updates to affected instances.
+
+By adhering to these two essential tasks, developers can ensure a hassle-free evolution of pipeline elements while
+StreamPipes handles the orchestration of the migration process.
+
+The following gives a concrete example of creating a migration for
+the [S7 adapter](./pe/org.apache.streampipes.connect.iiot.adapters.plc4x.s7.md).
+For this example, we assume the adapter has received a new input element which determines whether the connection should be made
+authenticated or not.
+This is represented by a simple boolean that is visualized as a toggle button in the UI.
+
+### Implementing a Concrete Migration
+
+StreamPipes offers three distinct migration mechanisms tailored to specific types of pipeline
+elements: `IAdapterMigrator`, `IDataProcessorMigrator`, and `IDataSinkMigrator`.
+These migration mechanisms are presented as interfaces and require the implementation of two fundamental methods:
+
+* `config()`: This method defines the configuration for the migration, encompassing all essential metadata related to
+  the migration process.
+* `migrate()`: Within this method, the actual migration logic is to be implemented.
It serves as the operational core
+  for facilitating the migration for the respective pipeline element.
+
+In accordance with the example described above, we will implement the `Plc4xS7AdapterMigrationV1` in the following.
+
+:::note
+Before we begin, it's important to familiarize ourselves with two key conventions that guide our approach to migrations:
+
+* To maintain clarity and organization, all migration classes associated with a specific pipeline element are located
+  within a dedicated sub-package named `migration`. This sub-package is nested within the package of the respective
+  pipeline element.
+* Migration classes are named according to a specific schema: `<ElementName>MigrationV<TargetVersion>`. For
+  example, if you are working on a migration for the PLC4x S7 adapter targeting version 1, the migration class would be
+  named `Plc4xS7AdapterMigrationV1`.
+:::
+
+Let's begin with providing the migration's configuration:
+
+```java
+@Override
+public ModelMigratorConfig config() {
+    return new ModelMigratorConfig(
+            "org.apache.streampipes.connect.iiot.adapters.plc4x.s7",
+            SpServiceTagPrefix.ADAPTER,
+            0,
+            1
+    );
+}
+```
+
+The migration config consists of the following four parts:
+
+* `targetAppId`: this needs to equal the app id of the targeted element
+* `modelType`: the type of the element to be migrated, this can be one
+  of: `SpServiceTagPrefix.ADAPTER`, `SpServiceTagPrefix.DATA_PROCESSOR`, `SpServiceTagPrefix.DATA_SINK`.
+* `fromVersion`: the version of the element that the migration expects as input
+* `toVersion`: the version the element has after the migration (needs to be at least `fromVersion + 1`)
+
+The second step is to implement the actual migration logic.
+In our example, we need to extend the existing static properties by an additional boolean property.
+
+```java
+@Override
+public MigrationResult<AdapterDescription> migrate(AdapterDescription element, IStaticPropertyExtractor extractor) throws RuntimeException {
+
+    var config = element.getConfig();
+
+    var slideToggle = new SlideToggleStaticProperty();
+    slideToggle.setDefaultValue(false);
+    slideToggle.setLabel("Authentication required?");
+    config.add(slideToggle);
+
+    element.setConfig(config);
+    return MigrationResult.success(element);
+}
+```
+
+We've completed all the necessary steps for our migration. The final task remaining is to register the migration within
+the service definition.
+
+### Registering the Migration
+
+The migration is only sent to the StreamPipes core service once it is registered in the service definition.
+Therefore, we need to add the migration to the same service definition as the element to migrate.
+In our example this is defined in `ConnectAdapterIiotInit`: + +```java jsx {22-24} showLineNumbers +@Override +public SpServiceDefinition provideServiceDefinition() { + return SpServiceDefinitionBuilder.create("connect-adapter-iiot", + "StreamPipes connect worker containing adapters relevant for the IIoT", + "", + 8001) + .registerAdapter(new MachineDataSimulatorAdapter()) + .registerAdapter(new FileReplayAdapter()) + .registerAdapter(new IfmAlMqttAdapter()) + .registerAdapter(new RosBridgeAdapter()) + .registerAdapter(new OpcUaAdapter()) + .registerAdapter(new Plc4xS7Adapter()) + .registerAdapter(new Plc4xModbusAdapter()) + .registerAdapter(new KafkaProtocol()) + .registerAdapter(new MqttProtocol()) + .registerAdapter(new NatsProtocol()) + .registerAdapter(new HttpStreamProtocol()) + .registerAdapter(new PulsarProtocol()) + .registerAdapter(new RocketMQProtocol()) + .registerAdapter(new HttpServerProtocol()) + .registerAdapter(new TubeMQProtocol()) + .registerMigrators( + new Plc4xS7AdapterMigrationV1() + ) + .build(); +``` + +
+}
+```
+
+## How Migrations are Handled Internally
+
+Migrations are handled by an interplay between the Extension Service, which provides the migrations,
+and the StreamPipes Core Service, which manages the migrations, as shown in the figure below:
+Interplay of extensions service and core to handle migrations
+
+When an extensions service is initiated and has successfully registered itself with the core, it proceeds to send a
+request to the core. This request includes a comprehensive list of all available migrations that have been registered
+for it. Since this collection of migrations may encompass multiple migrations that affect the same pipeline element,
+the migrations are first de-duplicated and then sorted based on their version range before being transmitted.
+
+Upon receiving these migrations, the core's actions can be categorized into two distinct parts:
+
+* Update descriptions for new elements
+* Update descriptions for existing elements
+
+### Update Descriptions for New Elements
+
+Each migration transmitted from the extensions service to the core triggers the core to update the description of the
+corresponding element stored in CouchDB. This is achieved by requesting the current configuration from the extensions
+service and subsequently overwriting the existing configuration in the storage.
+
+### Update Descriptions for Existing Elements
+
+For each migration sent from the extensions service to the core, the core conducts a thorough check to determine if any
+existing elements are affected by this migration. If such elements are identified, the extensions service is tasked with
+requesting and subsequently executing the migration on behalf of the core.
+
+In scenarios where multiple applicable migrations exist for a single pipeline element, they are sequentially applied.
+Success in this process allows the core to seamlessly update the configuration. However, if any issues arise, the
+corresponding pipeline element is halted. In the case of processors and sinks, the associated pipeline is even marked
+with a `needs attention` label, which becomes apparent in the UI.
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-output-strategies.md b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-output-strategies.md
new file mode 100644
index 000000000..feb224856
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-output-strategies.md
@@ -0,0 +1,348 @@
+---
+id: extend-sdk-output-strategies
+title: "SDK Guide: Output Strategies"
+sidebar_label: "SDK: Output Strategies"
+---
+
+## Introduction
+In StreamPipes, output strategies determine the output of a data processor.
+As the exact input schema of a processor is usually not yet known at development time (as processors can be connected with any stream that matches their requirements), output strategies are a concept to define how an input data stream is transformed to an output data stream.
+
+The following reference describes how output strategies can be defined using the SDK.
+
+:::tip Code on Github
+
+For all examples, the code can be found on [Github](https://www.github.com/apache/streampipes-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/apache/streampipes/pe/examples/jvm/outputstrategy/)
+
+:::
+
+
+## Reference
+
+The methods described below to create output strategies are available in the ``ProcessingElementBuilder`` class and are usually used in the ``declareModel`` method of the controller class.
+
+In the following, we will use this example event to explain how output strategies define the output of a data processor:
+
+```json
+{
+  "timestamp" : 1234556,
+  "temperature" : 37.0,
+  "deviceId" : "1"
+}
+```
+
+### Keep Output
+
+A ``KeepOutputStrategy`` declares that the output event schema will be equal to the input event schema.
+In other words, the processor does not change the schema, but might change the values of event properties.
+
+A keep output strategy can be defined as follows:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+        ".keep", "Keep output example example", "")
+        .requiredStream(StreamRequirementsBuilder.
+            create()
+            .requiredProperty(EpRequirements.anyProperty())
+            .build())
+        .supportedProtocols(SupportedProtocols.kafka())
+        .supportedFormats(SupportedFormats.jsonFormat())
+
+        // declaring a keep output strategy
+        .outputStrategy(OutputStrategies.keep())
+
+        .build();
+  }
+
+```
+
+According to the example above, the expected output event schema of the example input event would be:
+
+```json
+{
+  "timestamp" : 1234556,
+  "temperature" : 37.0,
+  "deviceId" : "1"
+}
+```
+
+Data processors that perform filter operations (e.g., filtering temperature values that are above a given threshold) are a common example of using keep output strategies.
+
+
+### Fixed Output
+
+A ``FixedOutputStrategy`` declares that the data processor itself provides the event schema. The output schema does not depend on the input event.
+
+Fixed output strategies need to provide the event schema they produce at development time:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+        ".fixed", "Fixed output example", "")
+        .requiredStream(StreamRequirementsBuilder.
+            create()
+            .requiredProperty(EpRequirements.anyProperty())
+            .build())
+        .supportedProtocols(SupportedProtocols.kafka())
+        .supportedFormats(SupportedFormats.jsonFormat())
+
+        // the fixed output strategy provides the schema
+        .outputStrategy(OutputStrategies.fixed(EpProperties.timestampProperty("timestamp"),
+            EpProperties.doubleEp(Labels.from("avg", "Average value", ""), "avg", SO.Number)))
+
+        .build();
+  }
+
+```
+
+In this example, we declare that the output schema always consists of two fields (``timestamp`` and ``avg``).
+
+Therefore, an output event should look like:
+
+```json
+{
+  "timestamp" : 1234556,
+  "avg" : 36.0
+}
+```
+
+
+### Append Output
+
+An ``AppendOutputStrategy`` appends additional fields to the schema of an incoming event stream. For instance, data processors that perform enrichment operations usually make use of append output strategies.
+
+Similar to the fixed output strategy, the additional fields must be provided at development time in the controller method as follows:
+
+```java
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+        ".append", "Append output example", "")
+
+        // boilerplate code not relevant here, see above
+
+        // declaring an append output
+        .outputStrategy(OutputStrategies.append(EpProperties.integerEp(Labels.from("avg",
+            "The average value", ""), "avg", SO.Number)))
+
+        .build();
+  }
+```
+
+In this case, the output event would have an additional field ``avg``:
+
+```json
+{
+  "timestamp" : 1234556,
+  "temperature" : 37.0,
+  "deviceId" : "1",
+  "avg" : 123.0
+}
+```
+
+### Custom Output
+
+In some cases, pipeline developers using the StreamPipes UI should be able to manually select fields from an input event schema. For such use cases, a ``CustomOutputStrategy`` can be used:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+        ".custom", "Custom output example", "")
+
+        // boilerplate code not relevant here, see above
+
+        // declaring a custom output
+        .outputStrategy(OutputStrategies.custom())
+
+        .build();
+  }
+
+```
+
+If a data processor defines a custom output strategy, the customization dialog in the pipeline editor will let users select the fields to keep:
+
+Number Parameter
+
+Taking our example, and assuming that the user selects both the ``timestamp`` and the ``temperature``, the expected output event should look like this:
+
+```json
+{
+  "timestamp" : 1234556,
+  "temperature" : 37.0
+}
+```
+
+How do we know which fields were selected once the data processor is invoked? Use the proper method from the extractor in the ``onInvocation`` method:
+
+```java
+@Override
+  public ConfiguredEventProcessor<DummyParameters> onInvocation(DataProcessorInvocation graph, ProcessingElementParameterExtractor extractor) {
+
+    List<String> outputSelectors = extractor.outputKeySelectors();
+
+    return new ConfiguredEventProcessor<>(new DummyParameters(graph), DummyEngine::new);
+  }
+```
+
+### Transform Output
+
+A ``TransformOutputStrategy`` declares that one or more fields of an incoming event stream are transformed. Transformations can be applied to the datatype of the property, the runtime name of the property, or any other schema-related declaration such as measurement units.
+
+#### Static Transform Operations
+
+Static transform operations do not depend on any user input (at pipeline development time) in order to know how to transform a field of an incoming event schema.
+
+Let's say our data processor transforms strings (that actually represent numbers) to a number datatype. In this case, we can use a static transform output strategy:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+        ".transform", "Transform output example example", "")
+        .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // static transform operation
+            .outputStrategy(OutputStrategies.transform(TransformOperations
+                    .staticDatatypeTransformation("str", Datatypes.Long)))
+
+            .build();
+  }
+
+```
+
+Note the mapping property that we use to determine which field of the input event should be transformed.
+
+The expected output event would look like this:
+
+```json
+{
+  "timestamp" : 1234556,
+  "temperature" : 37.0,
+  "deviceId" : 1
+}
+```
+
+#### Dynamic Transform Operations
+
+Sometimes, the exact transform output depends on user input provided at pipeline development time. Let's take a field renaming processor as an example, which lets the user rename a field from an input event schema to another field name.
+For such use cases, we can use a ``DynamicTransformOperation``:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy"
+            + ".transform", "Transform output example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // the text input to enter the new runtime name
+            .requiredTextParameter(Labels.from("new-runtime-name", "New Runtime Name", ""))
+
+            // dynamic transform operation
+            .outputStrategy(OutputStrategies.transform(TransformOperations
+                    .dynamicRuntimeNameTransformation("str", "new-runtime-name")))
+
+            .build();
+  }
+
+```
+
+For dynamic transform operations, an additional identifier that links to another static property can be assigned and later fetched in the ``onInvocation`` method.
+
+Assuming we want to rename the field ``temperature`` to ``temp``, the resulting output event should look like this:
+
+```json
+{
+  "timestamp" : 1234556,
+  "temp" : 37.0,
+  "deviceId" : "1"
+}
+```
+
+### Custom Transform Output
+
+Finally, in some cases the output schema cannot be described at pipeline development time. For these (usually rare) cases, a ``CustomTransformOutput`` strategy can be used.
+
+In this case, a callback function will be invoked in the controller class just after a user has filled in any static properties and clicks on ``Save`` in the pipeline editor.
+
+To define a custom transform output, we need to implement an interface in the controller class:
+
+```java
+public class CustomTransformOutputController extends
+        StandaloneEventProcessingDeclarer implements
+        ResolvesContainerProvidedOutputStrategy<DataProcessorInvocation, ProcessingElementParameterExtractor> {
+
+
+@Override
+  public EventSchema resolveOutputStrategy(DataProcessorInvocation processingElement, ProcessingElementParameterExtractor parameterExtractor) throws SpRuntimeException {
+    // provide the output schema here, see the implementation below
+  }
+}
+```
+
+In addition, the output strategy must be declared in the ``declareModel`` method:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy"
+            + ".customtransform", "Custom transform output example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // declare a custom transform output
+            .outputStrategy(OutputStrategies.customTransformation())
+
+            .build();
+  }
+
+```
+
+Once a new pipeline using this data processor is created and the configuration is saved, the ``resolveOutputStrategy`` method will be called, so that an event schema can be provided based on the given configuration. An extractor instance (see the guide on static properties) is available to extract the selected static properties and the connected event stream.
+
+```java
+@Override
+  public EventSchema resolveOutputStrategy(DataProcessorInvocation processingElement, ProcessingElementParameterExtractor parameterExtractor) throws SpRuntimeException {
+    return new EventSchema(Arrays
+            .asList(EpProperties
+                    .stringEp(Labels.from("runtime", "I was added at runtime", ""), "runtime", SO.Text)));
+  }
+```
+
+In this example, the output event schema should look like this:
+
+```json
+{
+  "runtime" : "Hello world!"
+}
+```
+
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-static-properties.md b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-static-properties.md
new file mode 100644
index 000000000..39c2d5d6c
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-static-properties.md
@@ -0,0 +1,267 @@
+---
+id: extend-sdk-static-properties
+title: "SDK Guide: Static Properties"
+sidebar_label: "SDK: Static Properties"
+---
+
+## Introduction
+Static properties represent user-facing parameters that are provided by pipeline developers.
+Processing elements can specify required static properties, which will render different UI views in the pipeline editor.
+
+The following reference describes how static properties can be defined using the SDK.
+
+:::tip Code on Github
+
+For all examples, the code can be found on [Github](https://github.com/apache/streampipes-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/apache/streampipes/pe/examples/jvm/staticproperty).
+
+:::
+
+## Reference
+
+The methods described below to create static properties are available in the ``ProcessingElementBuilder`` and ``DataSinkBuilder`` classes and are usually used in the ``declareModel`` method of the controller class.
+
+### Mapping property
+
+In StreamPipes, processing elements usually operate on fields of an event stream. For instance, a filter processor operates on a specific field from an input stream (e.g., a field measuring the temperature).
+Typically, pipeline developers should themselves select the exact field the operation is applied to.
+As this field is not yet known at pipeline element development time (as it is defined by the pipeline developer in the pipeline editor), mapping properties serve to map a stream requirement to a specific field from the actual input event stream.
+
+### Unary mapping property
+
+A unary mapping property maps a stream requirement to an actual field of an event stream. Therefore, the ``StreamRequirementsBuilder`` provides the option to directly add a mapping property along with a property requirement:
+
+```java
+.requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
+                            Labels.from("mp-key", "My Mapping", ""),
+                            PropertyScope.NONE)
+                    .build())
+```
+
+This leads to a selection dialog in the pipeline element customization which provides the user with a selection of all event properties (fields) from the input stream that match the specified property requirement:
+
+Text
+
+At invocation time, the value can be extracted in the ``onInvocation`` method as follows:
+
+```java
+// Extract the mapping property value
+String mappingPropertySelector = extractor.mappingPropertyValue("mp-key");
+```
+
+Note that this method returns a ``PropertySelector``, which can be used by the event model to extract the actual value of this field.
+
+### N-ary mapping property
+
+N-ary mapping properties work similarly to unary mapping properties, but allow the mapping of one requirement to multiple event properties matching the requirement:
+
+```java
+.requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithNaryMapping(EpRequirements.numberReq(),
+                            Labels.from("mp-key", "My Mapping", ""),
+                            PropertyScope.NONE)
+                    .build())
+```
+
+This renders the following selection, where users can select more than one matching event property:
+
+Text
+
+The following snippet returns a list containing the property selectors of all event properties that have been selected:
+
+```java
+// Extract the mapping property value
+List<String> mappingPropertySelectors = extractor.mappingPropertyValues("mp-key");
+```
+
+### Free-Text Parameters
+
+A free-text parameter requires the pipeline developer to enter a single value - which can be a string or another primitive data type.
+The input of free-text parameters can be restricted to specific value ranges or can be linked to the value set of a connected input data stream.
+
+#### Text Parameters
+
+A text parameter lets the user enter a string value. The following code line in the controller class
+
+```java
+.requiredTextParameter(Labels.from(SP_KEY, "Example Name", "Example Description"))
+```
+
+leads to the following input dialog in the pipeline editor:
+
+Text
+
+Users can enter any value that will be converted to a string datatype. To receive the entered value in the ``onInvocation`` method, use the following method from the ``ParameterExtractor``:
+
+```java
+String textParameter = extractor.singleValueParameter(SP_KEY, String.class);
+```
+
+#### Number parameters
+
+A number parameter lets the user enter a number value, either a floating-point number or an integer:
+
+```java
+// create an integer parameter
+.requiredIntegerParameter(Labels.from(SP_KEY, "Integer Parameter", "Example Description"))
+
+// create a float parameter
+.requiredFloatParameter(Labels.from("float-key", "Float Parameter", "Example Description"))
+
+```
+
+The integer parameter leads to the following input dialog in the pipeline editor, which only accepts integer values:
+
+Number Parameter
+
+The pipeline editor performs type validation and ensures that only numbers can be added by the user.
To receive the entered value in the ``onInvocation`` method, use the following method from the ``ParameterExtractor``:
+
+```java
+// Extract the integer parameter value
+Integer integerParameter = extractor.singleValueParameter(SP_KEY, Integer.class);
+
+// Extract the float parameter value
+Float floatParameter = extractor.singleValueParameter("float-key", Float.class);
+
+```
+
+#### Numbers with value specification
+
+You can also specify the value range of a number-based free text parameter:
+
+```java
+// create an integer parameter with value range
+.requiredIntegerParameter(Labels.from(SP_KEY, "Integer Parameter", "Example Description"), 0, 100, 1)
+
+```
+
+which renders the following input field:
+
+Number Parameter
+
+Receive the entered value in the same way as a standard number parameter.
+
+#### Free-text parameters linked to an event property
+
+
+### Single-Value Selections
+
+Single-value selections let the user select from a pre-defined list of options.
+A single-value selection requires the user to select exactly one option.
+
+```java
+.requiredSingleValueSelection(Labels.from("id", "Example Name", "Example Description"),
+    Options.from("Option A", "Option B", "Option C"))
+
+```
+
+Single-value selections will be rendered as a set of radio buttons in the pipeline editor:
+
+Number Parameter
+
+To extract the selected value, use the following method from the parameter extractor:
+
+```java
+// Extract the selected value
+String selectedSingleValue = extractor.selectedSingleValue("id", String.class);
+```
+
+:::tip Declaring options
+
+Sometimes, you may want to use an internal name that differs from the display name of an option.
+For that, you can use the method Options.from(Tuple2{'<'}String, String{'>'}) and the extractor method selectedSingleValueInternalName.
+
+:::
+
+
+### Multi-Value Selections
+
+Multi-value selections let the user select from a pre-defined list of options, where multiple or no option might be selected.
+
+```java
+.requiredMultiValueSelection(Labels.from("id", "Example Name", "Example Description"),
+    Options.from("Option A", "Option B", "Option C"))
+
+```
+
+Multi-value selections will be rendered as a set of checkboxes in the pipeline editor:
+
+Number Parameter
+
+To extract the selected value, use the following method from the parameter extractor:
+
+```java
+// Extract the selected value
+List<String> selectedMultiValue = extractor.selectedMultiValues("id", String.class);
+```
+
+### Domain Concepts
+
+(coming soon...)
+
+### Collections
+
+You can also define collections based on other static properties.
+
+```java
+// create a collection parameter
+.requiredParameterAsCollection(Labels.from("collection", "Example Name", "Example "
+      + "Description"), StaticProperties.stringFreeTextProperty(Labels
+      .from("text-property","Text","")))
+```
+
+While the items of the collection can be provided in the same way as the underlying static property, the UI provides buttons to add items to and remove items from the collection.
+
+Number Parameter
+
+To extract the selected values from the collection, use the following method from the parameter extractor:
+
+```java
+// Extract the text parameter value
+List<String> textParameters = extractor.singleValueParameterFromCollection("collection", String.class);
+```
+
+### Runtime-resolvable selections
+
+In some cases, the options of selection parameters are not static, but depend on other values or might change at runtime. In such cases, you can use runtime-resolvable selections.
+
+First, let your controller class implement ``ResolvesContainerProvidedOptions``:
+
+```java
+public class RuntimeResolvableSingleValue extends
+     StandaloneEventProcessingDeclarer implements ResolvesContainerProvidedOptions { ... }
+```
+
+Next, define the parameter in the ``declareModel`` method:
+
+```java
+// create a single value selection parameter that is resolved at runtime
+    .requiredSingleValueSelectionFromContainer(Labels.from("id", "Example Name", "Example "
+      + "Description"))
+```
+
+Finally, implement the method ``resolveOptions``, which will be called at runtime once the processor is used:
+
+```java
+  @Override
+  public List resolveOptions(String requestId, EventProperty linkedEventProperty) {
+    return Arrays.asList(new RuntimeOptions("I was defined at runtime", ""));
+  }
+```
+
+The UI will render a single-value parameter based on the options provided at runtime:
+
+Number Parameter
+
+The parameter extraction does not differ from the extraction of static single-value parameters.
+
+
+:::info Multi-value selections
+
+Although this example shows the usage of runtime-resolvable selections using single-value selections, the same also works for multi-value selections!
+
+:::
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-stream-requirements.md b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-stream-requirements.md
new file mode 100644
index 000000000..409c5164d
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-sdk-stream-requirements.md
@@ -0,0 +1,181 @@
+---
+id: extend-sdk-stream-requirements
+title: "SDK Guide: Stream Requirements"
+sidebar_label: "SDK: Stream Requirements"
+---
+
+## Introduction
+
+Data processors and data sinks can define ``StreamRequirements``. Stream requirements allow pipeline elements to express requirements on an incoming event stream that are needed for the element to work properly.
+Once users create pipelines in the StreamPipes Pipeline Editor, these requirements are verified against the connected event stream.
+By using this feature, StreamPipes ensures that only syntactically and semantically valid pipeline element connections can be created.
+
+This guide covers the creation of stream requirements. Before reading this section, we recommend that you make yourself familiar with the SDK guide on [data processors](extend-first-processor).
+
+
+:::tip Code on Github
+
+For all examples, the code can be found on [Github](https://www.github.com/apache/streampipes-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/apache/streampipes/pe/examples/jvm/requirements/).
+
+:::
+
+
+## The StreamRequirementsBuilder
+
+Stream requirements can be defined in the ``declareModel`` method of the pipeline element class. Start with a method body like this:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+The ``StreamRequirementsBuilder`` class provides methods to add stream requirements to a pipeline element.
+
+## Requirements on primitive fields
+
+As a very first example, let's assume we would like to create a data processor that filters numerical values that are above a given threshold.
+Consequently, any data stream that is connected to the filter processor needs to provide a numerical value.
+
+The stream requirement would be assigned as follows:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredProperty(EpRequirements.numberReq())
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+Note the line starting with ``requiredProperty``, which requires any stream to provide a field of type ``number``.
+
+In many cases, you'll want to let the user select a specific field of the data stream from all available fields that match the specified requirement. For that, you simply use the method ``requiredPropertyWithUnaryMapping`` as follows:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
+                            Labels.from("number-mapping", "The value that should be filtered", ""), PropertyScope.NONE)
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+See also the developer guide on [static properties](extend-sdk-static-properties) to better understand the usage of ``MappingProperties``.
+
+Requirements on primitive fields can be specified for all common datatypes:
+
+```java
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
+            ".simple", "Simple requirements specification examples", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.numberReq()) // any number
+                    .requiredProperty(EpRequirements.doubleReq()) // any field of type double
+                    .requiredProperty(EpRequirements.booleanReq()) // any field of type boolean
+                    .requiredProperty(EpRequirements.integerReq()) // any field of type integer
+                    .requiredProperty(EpRequirements.stringReq()) // any field of type string
+
+                    .requiredProperty(EpRequirements.anyProperty()) // any field allowed (no restriction)
+                    .requiredProperty(EpRequirements.timestampReq())  // any timestamp field
+                    .build())
+
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+```
+
+### Specifying semantics
+
+For some algorithms, only specifying the datatype is not sufficient. Let's consider a geofencing algorithm that detects the presence of a geospatial coordinate (e.g., from a vehicle) within a given area.
+
+You could specify something like this:
+
+```java
+    StreamRequirementsBuilder
+            .create()
+            .requiredPropertyWithUnaryMapping(EpRequirements.doubleReq(), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
+            .requiredPropertyWithUnaryMapping(EpRequirements.doubleReq(), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
+            .build()
+```
+
+However, this would allow users to create strange pipelines, as users could connect any stream containing a double value to our geofencing algorithm.
+To avoid such situations, you can also specify requirements based on the semantics of a field:
+
+```java
+    StreamRequirementsBuilder
+            .create()
+            .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Latitude), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
+            .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Longitude), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
+            .build()
+```
+
+Note that in this case, we make use of Schema.org's ``Latitude`` concept ([https://schema.org/latitude](https://schema.org/latitude)). StreamPipes already includes popular vocabularies for specifying semantics. You are also free to use your own vocabularies.
+
+
+## Requirements on lists
+
+Similarly to primitive requirements, you can define processors that require data streams with list fields; see the following example:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
+            ".list", "List requirements specification examples", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Integer))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Double))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Boolean))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.String))
+                    .build())
+
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+## Requirements on nested properties
+
+(coming soon, see the Javadoc for now)
+
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-setup.md b/website-v2/versioned_docs/version-0.95.1/06_extend-setup.md
new file mode 100644
index 000000000..12a96eb8e
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-setup.md
@@ -0,0 +1,50 @@
+---
+id: extend-setup
+title: Development Setup
+sidebar_label: Development Setup
+---
+
+Pipeline elements in StreamPipes are provided as standalone microservices. New pipeline elements can be easily developed using the provided Maven archetypes and can be installed in StreamPipes at runtime.
+
+In this section, we describe our recommended minimum setup for locally setting up a development instance of StreamPipes to develop, run and test new pipeline elements.
+
+## IDE & required dev tools
+StreamPipes does not have specific requirements on the IDE - so feel free to choose the IDE of your choice.
+The only requirements in terms of development tools are that you have Java 17 and Maven installed.
+
+## StreamPipes CLI: Docker-based local StreamPipes instance
+In order to quickly test developed pipeline elements without needing to install all services required by StreamPipes, we provide a CLI tool that allows you to selectively start StreamPipes components.
+The CLI tool allows you to switch between several templates (based on docker-compose) depending on your role.
+
+The documentation on the usage of the CLI tool is available [here](06_extend-cli.md).
+
+## Override the SP_HOST variable
+
+By default, the backend/core of StreamPipes registers itself within StreamPipes' service discovery mechanism using an auto-discovered hostname.
+Usually, this will be an IP address from the Docker network, which is not resolvable from outside.
Therefore, for local development you need to override the hostname with an IP address which is accessible from the local host where you develop extensions.
+When using the CLI, open the CLI folder ``installer/cli``, navigate to ``deploy/standalone/backend``, open the ``docker-compose.dev.yml`` file and add the SP_HOST env variable, e.g.:
+
+```
+version: "3.4"
+services:
+  backend:
+    ports:
+      - "8030:8030"
+    environment:
+      - SP_HOST=host.docker.internal
+```
+
+Note that host.docker.internal will work as an alias under Docker for Desktop on Windows and Mac, but not on Linux or M1. In this case, provide a resolvable hostname or IP address manually.
+
+## Starter projects
+
+Now, once you've started the development instance, you are ready to develop your very first pipeline element.
+Instead of starting from scratch, we recommend using our provided Maven archetypes:
+
+### Maven archetypes
+
+Create the Maven archetype as described in the [Maven Archetypes](06_extend-archetypes.md) guide.
+
+### Examples
+
+We provide several examples that explain the usage of some concepts in this [Github repo](https://github.com/apache/streampipes-examples).
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-adapters.md b/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-adapters.md
new file mode 100644
index 000000000..4e95cba9a
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-adapters.md
@@ -0,0 +1,612 @@
+---
+id: extend-tutorial-adapters
+title: "Tutorial: Build Custom Adapters"
+sidebar_label: "Tutorial: Adapters"
+---
+
+In this tutorial, we will create a new data source consisting of a single data stream.
+By the end of the tutorial, you will be able to implement custom adapters that allow you to connect to data sources
+other than those officially supported by StreamPipes.
+To do this, we will split the tutorial into two parts.
+The [first part](#building-a-basic-adapter) focuses on creating the adapter and defining the event stream.
+At the end, we will have a working adapter that produces an event stream that can be used in StreamPipes.
+This adapter does not provide any way to configure its behavior, which is why
+the [second part](#building-a-more-advanced-adapter-by-processing-ui-input) of the tutorial
+shows how we can extend our existing adapter to be configurable via the UI.
+
+:::info
+This tutorial shows how to build your own type of adapter.
+It is intended for people who are interested in extending StreamPipes to meet their own needs.
+If you are here to explore StreamPipes and are interested in using an adapter, you may want to
+continue [here](./03_use-connect.md).
+:::
+
+## Objective
+
+We are going to create an adapter that will simulate a stream of data generated by a control station in a logistics
+center that is used to sort packages.
+This station consists of two sensors: a light barrier that detects when a package passes through, and a weight sensor.
+
+Together, these sensors produce a continuous stream of events containing the current timestamp, an indicator of whether a package
+is present or the conveyor is empty, and the weight of the package in kilograms.
+The events are published in JSON format as follows:
+
+```json
+{
+  "timestamp": 1697720916959,
+  "parcelPresent": true,
+  "weight": 3.520
+}
+```
+
+In the following section, we will show you how to develop an adapter that is capable of generating this stream so that
+it is available for further processing in StreamPipes.
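+
+To make this concrete before we start coding: inside a StreamPipes adapter, such an event is represented as a plain key-value map that mirrors the JSON structure above, and later in this tutorial the adapter will build exactly this kind of map and hand it over to StreamPipes. The following minimal sketch (the class name is illustrative and not part of the generated project) shows this representation:
+
+```java
+import java.util.HashMap;
+import java.util.Map;
+
+public class ParcelEventPreview {
+
+  public static void main(String[] args) {
+    // Key-value representation of the JSON event shown above
+    Map<String, Object> event = new HashMap<>();
+    event.put("timestamp", 1697720916959L);
+    event.put("parcelPresent", true);
+    event.put("weight", 3.520);
+
+    // Prints the three key-value pairs (iteration order of a HashMap is not guaranteed)
+    System.out.println(event);
+  }
+}
+```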
+
+## Project Setup
+
+Instead of creating a new project from scratch, we recommend using our Maven archetype to create a new project
+skeleton (`streampipes-archetype-extensions-jvm`).
+Enter the following command in a command line of your choice (please ensure
+that [Apache Maven](https://maven.apache.org/install.html) is installed):
+
+```bash
+mvn archetype:generate \
+-DarchetypeGroupId=org.apache.streampipes -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
+-DarchetypeVersion=0.93.0 -DgroupId=org.apache.streampipes \
+-DartifactId=streampipes-archetype-extensions-jvm -DclassNamePrefix=ParcelControlStation -DpackageName=parcelcontrol
+```
+
+This command will ask you for input twice; you can just skip both prompts by hitting *enter*.
+The first dialog sets the version to use for our `streampipes-archetype-extensions-jvm` module.
+Feel free to change this if you like.
+
+```bash
+Define value for property 'version' 1.0-SNAPSHOT: :
+
+ Y: :
+```
+
+The `mvn archetype:generate` command generates some required files, the required file structure, and some boilerplate
+code.
+The generated file structure should look like the following:
+
+:::info
+Note that you can customize the parameters of the mvn command to affect the file structure and file naming.
+:::
+
+```bash
+
+|streampipes-archetype-extensions    # name is determined by '-DartifactId'
+|-- development
+|   |-- env
+|-- src
+|   |-- main
+|   |   |-- java.org.apache.streampipes    # name after .java. is determined by '-DgroupId'
+|   |   |   |-- pe.parcelcontrol    # name after .pe. is determined by '-DpackageName'
+|   |   |   |   |-- ParcelControlStationDataProcessor.java    # class name is determined by '-DclassNamePrefix'
+|   |   |   |   |-- ParcelControlStationDataSink.java
+|   |   |   |   |-- ParcelControlStationGenericAdapter.java
+|   |   |   |   |-- ParcelControlStationSpecificAdapter.java
+|   |   |   |-- Init.java
+|   |   |-- resources
+|   |   |   |-- org.apache.streampipes.pe.parcelcontrol.genericadapter
+|   |   |   |   |-- documentation.md
+|   |   |   |   |-- icon.png
+|   |   |   |   |-- strings.en
+|   |   |   |-- org.apache.streampipes.pe.parcelcontrol.processor
+|   |   |   |   |-- documentation.md
+|   |   |   |   |-- icon.png
+|   |   |   |   |-- strings.en
+|   |   |   |-- org.apache.streampipes.pe.parcelcontrol.sink
+|   |   |   |   |-- documentation.md
+|   |   |   |   |-- icon.png
+|   |   |   |   |-- strings.en
+|   |   |   |-- org.apache.streampipes.pe.parcelcontrol.specificadapter
+|   |   |   |   |-- documentation.md
+|   |   |   |   |-- icon.png
+|   |   |   |   |-- strings.en
+|   |-- test.java.org.apache.streampipes    # name after .java. is determined by '-DgroupId'
+|   |   |-- InitTest.java
+|-- Dockerfile
+|-- pom.xml
+
+```
+
+:::tip
+In addition to the basic project skeleton, the sample project also includes a sample `Dockerfile` that you can use to
+package your application into a Docker container.
+:::
+
+## Building a Basic Adapter
+
+In the following, we will demonstrate how to use the boilerplate code generated by the Maven plugin (
+see [Project setup](#project-setup)).
+Within this section, we will focus on creating an event stream that can be used within StreamPipes.
+The following section shows how to configure the created adapter with UI input.
+
+Attentive readers may have noticed that two adapter classes have been generated.
+We will focus on the `ParcelControlStationSpecificAdapter` first; the `ParcelControlStationGenericAdapter` will
+be used later for more advanced adapter features.
+First, let us take a look at the `ParcelControlStationSpecificAdapter.java` file as generated by the Maven
+archetype.
+
+```java jsx showLineNumbers
+package org.apache.streampipes.pe.parcelcontrol;
+
+import org.apache.streampipes.commons.exceptions.connect.AdapterException;
+import org.apache.streampipes.extensions.api.connect.IAdapterConfiguration;
+import org.apache.streampipes.extensions.api.connect.IEventCollector;
+import org.apache.streampipes.extensions.api.connect.StreamPipesAdapter;
+import org.apache.streampipes.extensions.api.connect.context.IAdapterGuessSchemaContext;
+import org.apache.streampipes.extensions.api.connect.context.IAdapterRuntimeContext;
+import org.apache.streampipes.extensions.api.extractor.IAdapterParameterExtractor;
+import org.apache.streampipes.model.AdapterType;
+import org.apache.streampipes.model.connect.guess.GuessSchema;
+import org.apache.streampipes.sdk.builder.adapter.AdapterConfigurationBuilder;
+import org.apache.streampipes.sdk.builder.adapter.GuessSchemaBuilder;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.Locales;
+import org.apache.streampipes.sdk.utils.Assets;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class ParcelControlStationSpecificAdapter implements StreamPipesAdapter {
+
+  private boolean running = false;
+
+  @Override
+  public IAdapterConfiguration declareConfig() {
+    return AdapterConfigurationBuilder.create(
+        "org.apache.streampipes.pe.parcelcontrol.specificadapter",
+        ParcelControlStationSpecificAdapter::new
+    )
+        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+        .withCategory(AdapterType.Manufacturing)
+        .withLocales(Locales.EN)
+        .buildConfiguration();
+  }
+
+  @Override
+  public void onAdapterStarted(IAdapterParameterExtractor extractor,
+                               IEventCollector collector,
+                               IAdapterRuntimeContext adapterRuntimeContext) throws AdapterException {
+
+    Runnable demo = () -> {
+      while (running) {
+        // make event
+        Map<String, Object> event = new HashMap<>();
+        // forward the event to the adapter pipeline
+        collector.collect(event);
+      }
+    };
+    running = true;
+    new Thread(demo).start();
+  }
+
+  @Override
+  public void onAdapterStopped(IAdapterParameterExtractor extractor,
+                               IAdapterRuntimeContext adapterRuntimeContext) throws AdapterException {
+
+    // do cleanup
+    running = false;
+  }
+
+  @Override
+  public GuessSchema onSchemaRequested(IAdapterParameterExtractor extractor,
+                                       IAdapterGuessSchemaContext adapterGuessSchemaContext) throws AdapterException {
+
+    // build the schema by adding properties to the schema builder and a preview if possible
+    return GuessSchemaBuilder
+        .create()
+        .build();
+  }
+}
+
+```
+
+The class implements `StreamPipesAdapter`, which is the interface that all adapters within StreamPipes must implement.
+This interface requires us to implement four methods:
+
+* `declareConfig()`: This method is expected to return the configuration of the adapter. The configuration includes
+  metadata about the adapter and its input parameters.
+* `onAdapterStarted()`: This method is expected to contain the actual adapter logic. It is called when the adapter is
+  started, and is responsible for sending incoming data to StreamPipes as an event.
+* `onAdapterStopped()`: This method is called when the adapter is stopped; it is responsible for shutting down the
+  adapter gracefully and usually performs some cleanup tasks (see the note after this list).
+* `onSchemaRequested()`: This method is expected to return the schema of the event stream. This is ideally done
+  dynamically based on some incoming data (*guess*) or provided statically if not otherwise possible.
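+
+One detail worth noting before we fill in the logic: the `running` flag is written by the thread that calls `onAdapterStopped()` but read by the worker thread started in `onAdapterStarted()`. In production code, you would typically declare such a flag as `volatile` so that the update is guaranteed to become visible to the worker thread. A minimal sketch of this pattern (class and method names are illustrative):
+
+```java
+public class SafeStopSketch {
+
+  // volatile guarantees that the worker thread sees updates made by other threads
+  private volatile boolean running = false;
+
+  public void start() {
+    running = true;
+    new Thread(() -> {
+      while (running) {
+        // create and forward events here
+      }
+    }).start();
+  }
+
+  public void stop() {
+    // without volatile, the worker thread might never observe this write
+    running = false;
+  }
+}
+```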
+
+### Describing the Adapter via the Configuration
+
+The standard code generated here is already sufficient for us.
+So let's have a quick look at the important aspects:
+
+* `Line 4`: Here we define a unique identifier for our adapter. This allows us to identify all instances of the same
+  adapter. Including your own namespace is always a good choice to avoid conflicts.
+* `Line 7`: Here we define what assets are available for this adapter. In this case, we provide a documentation file and
+  an icon. Both assets are located in the `resources` directory (see file tree above).
+* `Line 8`: This defines a rough categorization based on predefined adapter types.
+* `Line 9`: Here we define which locales are available for this adapter. Since we only provide one `strings.en` file so
+  far (see file tree above), the current selection is sufficient. Theoretically you can support multiple languages, but
+  this is not fully supported yet.
+
+```java jsx {4,7-9} showLineNumbers
+  @Override
+  public IAdapterConfiguration declareConfig() {
+    return AdapterConfigurationBuilder.create(
+        "org.apache.streampipes.pe.parcelcontrol.specificadapter",
+        ParcelControlStationSpecificAdapter::new
+    )
+        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+        .withCategory(AdapterType.Manufacturing)
+        .withLocales(Locales.EN)
+        .buildConfiguration();
+  }
+```
+
+Before we continue, let's quickly have a look at the `strings.en` file that defines our locales.
+Here we can define a meaningful and human-readable adapter title in the first line and a short description:
+
+```text
+org.apache.streampipes.pe.parcelcontrol.specificadapter.title=Parcel Control Station (simple)
+org.apache.streampipes.pe.parcelcontrol.specificadapter.description=This adapter simulates data coming from a parcel control station in a logistics center.
+```
+
+Now that we have successfully configured our adapter and prepared all descriptive elements, we can focus on the actual logic.
+
+### Creating the Data Stream
+
+The logic that creates events that are then distributed via StreamPipes is defined in `onAdapterStarted()`.
+Within this method, connectors usually connect to the data source and extract data.
+In our case, however, we simply want to create some sample data directly.
+The two main parts that should always happen within this method are highlighted in the provided skeleton code:
+
+* `Line 10`: Creating an event is crucial for our adapters. This event is then filled with data by the adapter before it
+  is distributed.
+* `Line 13`: The event must finally be passed to the `collector`, which then takes the data and distributes it within
+  StreamPipes in the form of a [data stream](./02_concepts-overview.md#data-stream).
+
+```java jsx {10,13} showLineNumbers
+@Override
+public void onAdapterStarted(IAdapterParameterExtractor extractor,
+                             IEventCollector collector,
+                             IAdapterRuntimeContext adapterRuntimeContext) throws AdapterException {
+
+  Runnable demo = () -> {
+    while (running) {
+
+      // make event
+      Map<String, Object> event = new HashMap<>();
+
+      // forward the event to the adapter pipeline
+      collector.collect(event);
+    }
+  };
+  running = true;
+  new Thread(demo).start();
+}
+```
+
+So the only thing left to do is to create the actual events.
+In our scenario, we want to create two types of events: one describing an empty conveyor and one describing a detected
+and weighed package.
+To keep the implementation simple, we just emit a parcel event every five seconds.
We can implement this as
+follows:
+
+```java
+    Runnable parcelControl = () -> {
+      while (running) {
+
+        // get the current time in seconds
+        long timestamp = System.currentTimeMillis();
+        long timeInSeconds = timestamp / 1000;
+
+        // make event
+        Map<String, Object> event = new HashMap<>();
+        event.put("timestamp", timestamp);
+
+        if (timeInSeconds % 5 == 0) {
+          event.put("parcelPresent", true);
+          event.put("weight", ThreadLocalRandom.current().nextDouble(0, 10));
+
+        } else {
+          event.put("parcelPresent", false);
+          event.put("weight", 0.0);
+        }
+
+        // forward the event to the adapter pipeline
+        collector.collect(event);
+
+        try {
+          Thread.sleep(1000);
+        } catch (InterruptedException e) {
+          throw new RuntimeException(e);
+        }
+      }
+    };
+    running = true;
+    new Thread(parcelControl).start();
+```
+
+This is already enough to get a data stream into StreamPipes.
+As the next step, we need to describe the event schema.
+
+### Defining the Event Schema
+
+In StreamPipes, each data stream comes with an event schema that describes what information the event contains,
+in what data formats, and some semantic type information.
+This allows StreamPipes to provide easy and convenient stream handling with a lot of automatic conversions and
+validations, for example, checking whether a particular data processor is suitable for a given event stream.
+This event schema is provided by `onSchemaRequested()`:
+
+```java
+@Override
+public GuessSchema onSchemaRequested(IAdapterParameterExtractor extractor,
+                                     IAdapterGuessSchemaContext adapterGuessSchemaContext) throws AdapterException {
+
+  // build the schema by adding properties to the schema builder and a preview if possible
+  return GuessSchemaBuilder
+      .create()
+      .build();
+  }
+
+```
+
+Normally, the event schema is determined automatically and dynamically, since an adapter is usually quite generic (read
+more in the [Advanced section](#advanced)).
+But in our case, we already know the event schema, and it never changes, so we can just define it:
+
+```java jsx {3,13-20} showLineNumbers
+@Override
+public GuessSchema onSchemaRequested(IAdapterParameterExtractor extractor,
+                                     IAdapterGuessSchemaContext adapterGuessSchemaContext) throws AdapterException {
+
+  // build the schema by adding properties to the schema builder and a preview if possible
+  return GuessSchemaBuilder.create()
+      .property(timestampProperty("timestamp"))
+      .sample("timestamp", System.currentTimeMillis())
+      .property(PrimitivePropertyBuilder
+          .create(Datatypes.Boolean, "parcelPresent")
+          .label("Parcel Present")
+          .description("Indicates if a parcel is weighed.")
+          .domainProperty(SO.BOOLEAN)
+          .scope(PropertyScope.MEASUREMENT_PROPERTY)
+          .build())
+      .sample("parcelPresent", true)
+      .property(PrimitivePropertyBuilder
+          .create(Datatypes.Double, "weight")
+          .label("Parcel Weight")
+          .description("Parcel weight")
+          .domainProperty(SO.WEIGHT)
+          .scope(PropertyScope.MEASUREMENT_PROPERTY)
+          .build())
+      .sample("weight", 3.520)
+      .build();
+```
+
+An attribute of an event is referred to as a `property` in StreamPipes.
+So in our case, we have three properties.
+Since StreamPipes creates a sample event in the UI when configuring the adapter (
+see [here](./03_use-connect.md#schema-editor)),
+providing a meaningful sample value for every property allows StreamPipes to demonstrate its full potential.
+
+Since every event schema is required to have a timestamp property, we provide a convenience definition (see `line 3`).
+For all other properties the recommended way of definition is using the `PrimitivePropertyBuilder` (see `lines 13-20`) and
+consists of the following steps:
+
+* `Line 14`: Every property must have a specified data type and a property name.
+* `Line 15`: In addition to the property name we can define a label that is designed for the end user and shown in the
+  UI.
+* `Line 16`: Assigns a human-readable description to the event property. The description is used in the StreamPipes UI
+  to better explain the meaning of the property to users.
+* `Line 17`: Specifies the semantics of the property (e.g., whether a double value stands for a weight or a temperature
+  value).
+* `Line 18`: Assigns a property scope to the event property. This determines how the property is handled internally.
+
+:::note
+StreamPipes does not require you to provide all of this information about a property.
+Anything beyond line `14` (up to line `20`) is optional, but the more you provide, the better StreamPipes can show its
+full potential and feature richness.
+:::
+
+This makes our adapter almost complete; there is only one small step left.
+
+### Defining the Adapter Termination
+
+As a final step, we need to define what should happen if the adapter is stopped.
+In general, the adapter should not fire any events after that.
+Normally, this step includes things like closing connections and clearing resources.
+In our case, this is quite simple: we just need to stop our thread:
+
+```java
+@Override
+public void onAdapterStopped(IAdapterParameterExtractor extractor,
+                             IAdapterRuntimeContext adapterRuntimeContext) throws AdapterException {
+
+  // do cleanup
+  running = false;
+}
+```
+
+Now it's time to start our adapter and observe it in action!
+
+### Register and Run the Adapter
+
+Before we actually use our adapter, let's take a quick look at the `Init` class. This class is responsible for
+registering our adapter service with the core to make the adapter available in StreamPipes.
+This is done within `provideServiceDefinition()`. Since we don't have the generic adapter ready yet,
+we'll comment out its registration (`line 7`). Now we can run the `Init` class to register the adapter with your running
+StreamPipes instance. If you don't have a running instance at hand,
+you can take a look at our [Installation Guide](./01_try-installation.md).
+
+```java jsx {7-8} showLineNumbers
+@Override
+public SpServiceDefinition provideServiceDefinition() {
+  return SpServiceDefinitionBuilder.create("org.apache.streampipes",
+          "human-readable service name",
+          "human-readable service description", 8090)
+      .registerRuntimeProvider(new StandaloneStreamPipesRuntimeProvider())
+      //.registerAdapter(new ParcelControlStationGenericAdapter())
+      .registerAdapter(new ParcelControlStationSpecificAdapter())
+      .registerMessagingFormats(
+          new JsonDataFormatFactory(),
+          new CborDataFormatFactory(),
+          new SmileDataFormatFactory(),
+          new FstDataFormatFactory())
+      .registerMessagingProtocols(
+          new SpKafkaProtocolFactory(),
+          new SpJmsProtocolFactory(),
+          new SpMqttProtocolFactory(),
+          new SpNatsProtocolFactory(),
+          new SpPulsarProtocolFactory())
+      .build();
+}
+ ```
+
+:::tip
+When executing the `main()` method of the `Init` class, make sure that all environment variables from
+the `development/env` file are set.
+If they are not set, the adapter may not be able to register with StreamPipes.
+:::
+
+Once you see the following log message in the console, the adapter is ready, and you can switch to the UI of your
+StreamPipes instance.
+
+```bash
+s.s.e.c.ConnectWorkerRegistrationService : Successfully connected to master. Worker is now running.
+```
+
+Please go to the connect module and click on `New Adapter`;
+you should now be able to see your adapter `Parcel Control Station (simple)`:
+Demo of parcel adapter
+
+The adapter now runs successfully in StreamPipes. You can play around with the data stream that the
+adapter produces, or continue with the next section to learn how to make an adapter configurable via the UI.
+
+### Building a more Advanced Adapter by Processing UI Input
+
+In this section, we will extend our previously built adapter by adding the ability to configure, via the UI, the minimum
+and maximum parcel weight from which the weight value is sampled.
+The beauty of building adapters for StreamPipes is that you don't have to worry about the UI.
+StreamPipes provides a set of pre-built input elements for adapters that you can simply add to your adapter
+configuration.
+So the first thing we need to customize is `declareConfig()`:
+
+```java jsx {10-11} showLineNumbers
+@Override
+public IAdapterConfiguration declareConfig() {
+  return AdapterConfigurationBuilder.create(
+      "org.apache.streampipes.pe.parcelcontrol.specificadapter",
+      ParcelControlStationSpecificAdapter::new
+  )
+      .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+      .withCategory(AdapterType.Manufacturing)
+      .withLocales(Locales.EN)
+      .requiredFloatParameter(Labels.withId("min-weight"), 0.0f)
+      .requiredFloatParameter(Labels.withId("max-weight"), 10.f)
+      .buildConfiguration();
+}
+
+```
+
+In lines `10-11` we have introduced two input parameters that expect float values as input. They have default values
+of `0` and `10`, respectively. The defined identifiers (`min-weight` and `max-weight`) can be used to provide a caption and
+a description via the `strings.en` file:
+
+```text
+min-weight.title=Minimum Parcel Weight
+min-weight.description=The lower bound from which the weight values are sampled randomly.
+
+max-weight.title=Maximum Parcel Weight
+max-weight.description=The upper bound from which the weight values are sampled randomly.
+```
+
+As a last step, we now need to modify the calculation of the parcel weight, so that the provided parameters are actually
+applied.
+This is done in `onAdapterStarted()`.
+
+```java jsx {6-9,24} showLineNumbers
+@Override
+public void onAdapterStarted(IAdapterParameterExtractor extractor,
+                             IEventCollector collector,
+                             IAdapterRuntimeContext adapterRuntimeContext) throws AdapterException {
+
+  var ex = extractor.getStaticPropertyExtractor();
+
+  float minWeight = ex.singleValueParameter("min-weight", Float.class);
+  float maxWeight = ex.singleValueParameter("max-weight", Float.class);
+
+  Runnable parcelControl = () -> {
+    while (running) {
+
+      // get the current time in seconds
+      long timestamp = System.currentTimeMillis();
+      long timeInSeconds = timestamp / 1000;
+
+      // make event
+      Map<String, Object> event = new HashMap<>();
+      event.put("timestamp", timestamp);
+
+      if (timeInSeconds % 5 == 0) {
+        event.put("parcelPresent", true);
+        event.put("weight", ThreadLocalRandom.current().nextDouble(minWeight, maxWeight));
+
+      } else {
+        event.put("parcelPresent", false);
+        event.put("weight", 0.0);
+      }
+
+      // forward the event to the adapter pipeline
+      collector.collect(event);
+
+      try {
+        Thread.sleep(1000);
+      } catch (InterruptedException e) {
+        throw new RuntimeException(e);
+      }
+    }
+  };
+  running = true;
+  new Thread(parcelControl).start();
+}
+```
+
+* Lines `6-9`: We use a `StaticPropertyExtractor` to retrieve both user inputs.
+* Line `24`: We calculate the parcel weight by passing the configured values for the minimum and maximum value.
+
+You can now run the `main()` method of the `Init` class to register the adapter with StreamPipes.
+The UI dialog to create a new instance of our parcel control station adapter now looks like this:
+Adapter with UI dialog
+
+:::caution
+If you have already installed the parcel adapter, please make sure to uninstall it in `Install Pipeline Elements` before
+you restart the execution of the `Init` class.
+Otherwise, the changes made in this section will have no effect.
+:::
+
+### Read More
+
+Congratulations! You've just created your first StreamPipes adapter 🎉
+
+There are many more things to explore and data sources can be defined in much more detail.
+If this is of interest to you, the [advanced section](#advanced) will satisfy your needs.
+
+If anything within this tutorial did not work for you or you had problems following it,
+please feel free to provide some feedback by opening an [issue on GitHub](https://github.com/apache/streampipes/issues/new?assignees=&labels=bug%2Cdocumentation%2Cwebsite&projects=&template=doc_website_issue_report.yml).
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-data-processors.md b/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-data-processors.md
new file mode 100644
index 000000000..4c2d5ee25
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-data-processors.md
@@ -0,0 +1,454 @@
+---
+id: extend-tutorial-data-processors
+title: "Tutorial: Data Processors"
+sidebar_label: "Tutorial: Data Processors"
+---
+
+In this tutorial, we will add a new data processor.
+
+From an architectural point of view, we will create a self-contained service that includes the description of the data
+processor and an implementation.
+
+## Objective
+
+We are going to create a new data processor that realizes a simple geofencing algorithm - we detect vehicles that enter
+a specified radius around a user-defined location.
+This pipeline element will be a generic element that works with any event stream that provides geospatial coordinates in
+the form of a latitude/longitude pair.
+
+The algorithm outputs every location event once the position has entered the geofence.
+
+:::note
+
+The implementation in this tutorial is pretty simple - our processor will fire an event every time the GPS location is
+inside the geofence.
+In a real-world application, you would probably want to define a pattern that recognizes the _first_ time a vehicle
+enters the geofence.
+
+This can be easily done using a CEP library.
+
+:::
+
+## Project setup
+
+Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project
+skeleton (streampipes-archetype-extensions-jvm).
+Enter the following command in a command line of your choice (Apache Maven needs to be installed):
+
+```
+mvn archetype:generate \
+-DarchetypeGroupId=org.apache.streampipes -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
+-DarchetypeVersion=0.93.0 -DgroupId=my.groupId \
+-DartifactId=my-example -DclassNamePrefix=MyExample -DpackageName=mypackagename
+```
+
+You will see a project structure similar to the structure shown in the [archetypes](06_extend-archetypes.md) section.
+
+:::tip
+
+Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your
+application into a Docker container.
+
+:::
+
+Now you're ready to create your first data processor for StreamPipes!
+
+## Adding data processor requirements
+
+First, we will add a new stream requirement.
+Create a new class `GeofencingProcessor` which should look as follows:
+
+```java
+package org.apache.streampipes.pe.example;
+
+import org.apache.streampipes.extensions.api.pe.IStreamPipesDataProcessor;
+import org.apache.streampipes.extensions.api.pe.config.IDataProcessorConfiguration;
+import org.apache.streampipes.extensions.api.pe.context.EventProcessorRuntimeContext;
+import org.apache.streampipes.extensions.api.pe.param.IDataProcessorParameters;
+import org.apache.streampipes.extensions.api.pe.routing.SpOutputCollector;
+import org.apache.streampipes.model.DataProcessorType;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.sdk.builder.ProcessingElementBuilder;
+import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.apache.streampipes.sdk.builder.processor.DataProcessorConfiguration;
+import org.apache.streampipes.sdk.helpers.EpProperties;
+import org.apache.streampipes.sdk.helpers.EpRequirements;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.OutputStrategies;
+import org.apache.streampipes.sdk.helpers.SupportedFormats;
+import org.apache.streampipes.sdk.helpers.SupportedProtocols;
+import org.apache.streampipes.sdk.utils.Assets;
+import org.apache.streampipes.vocabulary.SO;
+
+public class GeofencingProcessor implements IStreamPipesDataProcessor {
+
+  private static final String LATITUDE_CENTER = "latitude-center";
+  private static final String LONGITUDE_CENTER = "longitude-center";
+
+
+  public IDataProcessorConfiguration declareConfig() {
+    return DataProcessorConfiguration.create(
+        GeofencingProcessor::new,
+        ProcessingElementBuilder.create(
+            "org.apache.streampipes.tutorial-geofencing"
+        )
+        .category(DataProcessorType.ENRICH)
+        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+        .build());
+  }
+
+  @Override
+  public void onPipelineStarted(IDataProcessorParameters params,
+                                SpOutputCollector collector,
+                                EventProcessorRuntimeContext runtimeContext) {
+
+  }
+
+  @Override
+  public void onEvent(Event event,
+                      SpOutputCollector collector) {
+
+  }
+
+  @Override
+  public void onPipelineStopped() {
+
+  }
+}
+
+
+```
+
+In this class, we need to implement four methods: The `declareConfig` method is used to define abstract stream
+requirements such as event properties that must be present in any input stream that is later connected to the element
+using the StreamPipes UI.
+The second method, `onPipelineStarted`, is triggered once a pipeline is started.
+The `onEvent` method is called for every incoming event.
+Finally, the `onPipelineStopped` method is called once the pipeline is stopped.
+
+Similar to data sources, the SDK provides a builder class to generate the description for data processors.
+
+The current code within the `declareConfig` method creates a new data processor with the ID `org.apache.streampipes.tutorial-geofencing`.
+The ID is used as the internal ID of the data processor, but also used to reference additional assets in the `resources` folder, such as a `strings.en` file, used to configure labels and description, and a `documentation.md` file, which will later serve as a markdown documentation in the UI.
+But first, we will add some _stream requirements_ to the description.
As we'd like to develop a generic pipeline element that
+works with any event that provides a lat/lng pair, we define two stream requirements as stated below:
+
+```java
+.requiredStream(StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(
+        EpRequirements.domainPropertyReq(Geo.LAT),
+        Labels.from("latitude-field","Latitude","The event property containing the latitude value"),
+        PropertyScope.MEASUREMENT_PROPERTY
+    )
+    .requiredPropertyWithUnaryMapping(
+        EpRequirements.domainPropertyReq(Geo.LNG),
+        Labels.from("longitude-field","Longitude","The event property containing the longitude value"),
+        PropertyScope.MEASUREMENT_PROPERTY
+    )
+    .build())
+```
+
+The first line, `.requiredStream()`, defines that we want a data processor with exactly one input stream. Adding more
+stream requirements would create elements with multiple input connectors in StreamPipes.
+Stream requirements can be assigned by using the `StreamRequirementsBuilder` class.
+In our example, we define two requirements, so-called _domain property requirements_. In contrast to _data type
+requirements_ where we'd expect an event property with a field of a specific data type (e.g., float), domain property
+requirements expect a specific semantic type (called domain property), e.g., from a vocabulary such as the WGS84 Geo vocab.
+
+Once a pipeline is deployed, we are interested in the actual field (and its field name) that contains the latitude and
+longitude values.
+In some cases, there might be more than one field that satisfies a property requirement, and we would like users to
+select the property the geofencing component should operate on.
+Therefore, our example uses the method `requiredPropertyWithUnaryMapping`, which will map a requirement to a real event
+property of an input stream and let the user choose the appropriate field in the StreamPipes UI when pipelines are
+defined.
+
+Finally, the `PropertyScope` indicates that the required property is a measurement value (in contrast to a dimension
+value). This allows us later to provide improved user guidance in the pipeline editor.
+
+Similar to mapping properties, text parameters have an internalId (radius), a label and a description.
+In addition, we can assign a _value specification_ to the parameter indicating the value range we support.
+Our example supports a radius value between 0 and 1000 with a granularity of 1.
+In the StreamPipes UI, a required text parameter is rendered as a text input field; in case we provide an optional value
+specification, a slider input is automatically generated.
+
+For now, we've assigned parameters with an internal ID, a label and a description.
+To decouple human-readable labels and description from the actual data processor description, it is possible to extract the strings to a properties file.
+In the `resources` folder, switch to a folder with the same name as the data processor's ID. If you've used the Maven archetype to build your project, there should be a `strings.en` file.
+In this file, we can configure labels and descriptions.
+For instance, instead of writing
+
+```java
+
+.requiredPropertyWithUnaryMapping(
+    EpRequirements.domainPropertyReq(Geo.lat),
+    Labels.from("latitude-field", "Latitude", "The event property containing the latitude value"),
+    PropertyScope.MEASUREMENT_PROPERTY
+)
+
+```
+
+it is recommended to write
+
+```java
+
+.requiredPropertyWithUnaryMapping(
+    EpRequirements.domainPropertyReq(Geo.lat),
+    Labels.withId("latitude-field"),
+    PropertyScope.MEASUREMENT_PROPERTY
+)
+
+```
+
+and add the following lines to the `strings.en` file:
+
+```properties
+
+latitude-field.title=Latitude
+latitude-field.description=The event property containing the latitude value
+
+```
+
+This feature will also ease future internationalization efforts.
+
+Besides requirements, users should be able to define the center coordinate of the geofence and the size of the fence,
+defined as a radius around the center in meters.
+The radius can be defined by adding a required integer parameter to the description:
+
+```java
+.requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.", 0, 1000, 1)
+```
+
+Such user-defined parameters are called _static properties_. There are many different types of static properties (see
+the [Processor SDK](06_extend-sdk-static-properties.md) for an overview). Similar to stream requirements, it is also recommended to use `Labels.withId("radius")` and move labels and descriptions to the resource file.
+
+In this example, we'll further add two very simple input fields to let users provide the latitude and longitude of the
+geofence center.
+
+Add the following lines to the `declareConfig` method:
+
+```java
+    .requiredFloatParameter(Labels.from(LATITUDE_CENTER, "Latitude", "The latitude value"))
+    .requiredFloatParameter(Labels.from(LONGITUDE_CENTER, "Longitude", "The longitude value"))
+
+```
+
+Now we need to define the output of our geofencing pipeline element.
+As explained in the first section, the element should fire every time some geo-located entity arrives within the defined
+geofence.
+Therefore, the processor outputs the same schema as it receives as an input.
+Although we don't know the exact input right now, as it depends on the stream users connect in StreamPipes when creating
+pipelines, we can define an _output strategy_ as follows:
+
+```java
+.outputStrategy(OutputStrategies.keep())
+```
+
+This defines a _KeepOutputStrategy_, i.e., the input event schema is not modified by the processor.
+There are many more output strategies you can define depending on the functionality you desire, e.g., _AppendOutput_ for
+defining a processor that enriches events or _CustomOutput_ in case you would like users to select the output by
+themselves.
+
+That's it! We've now defined input requirements, required user input and an output strategy.
+In the next section, you will learn how to extract these parameters once the pipeline element is invoked after a
+pipeline was created.
+
+## Pipeline element invocation
+
+Once users start a pipeline that uses our geofencing component, the _onPipelineStarted_ method in our class is called. The
+interface `IDataProcessorParameters` includes convenient access to user-configured parameters a user has selected in the pipeline
+editor and information on the actual streams that are connected to the pipeline element.
+
+Next, we are interested in the fields of the input event stream that contain the latitude and longitude values we would
+like to compute against the geofence center location, as follows:
+
+```java
+ String latitudeFieldName = params.extractor().mappingPropertyValue("latitude-field");
+ String longitudeFieldName = params.extractor().mappingPropertyValue("longitude-field");
+```
+
+We use the same `internalId` we've used to define the mapping property requirements in the `declareConfig` method.
+
+Next, for extracting the geofence center coordinates, add two class variables `centerLatitude` and `centerLongitude` and
+assign the selected values using the following statements:
+
+```java
+ this.centerLatitude = params.extractor().singleValueParameter(LATITUDE_CENTER, Float.class);
+ this.centerLongitude = params.extractor().singleValueParameter(LONGITUDE_CENTER, Float.class);
+```
+
+The radius value can be extracted as follows:
+
+```java
+ int radius = params.extractor().singleValueParameter("radius", Integer.class);
+```
+
+Great! That's all we need to describe a data processor for usage in StreamPipes. Your processor class should look as
+follows:
+
+```java
+
+package org.apache.streampipes.pe.example;
+
+import org.apache.streampipes.extensions.api.pe.IStreamPipesDataProcessor;
+import org.apache.streampipes.extensions.api.pe.config.IDataProcessorConfiguration;
+import org.apache.streampipes.extensions.api.pe.context.EventProcessorRuntimeContext;
+import org.apache.streampipes.extensions.api.pe.param.IDataProcessorParameters;
+import org.apache.streampipes.extensions.api.pe.routing.SpOutputCollector;
+import org.apache.streampipes.model.DataProcessorType;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.model.schema.PropertyScope;
+import org.apache.streampipes.sdk.builder.ProcessingElementBuilder;
+import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.apache.streampipes.sdk.builder.processor.DataProcessorConfiguration;
+import org.apache.streampipes.sdk.helpers.EpRequirements;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.Locales;
+import org.apache.streampipes.sdk.helpers.OutputStrategies;
+import org.apache.streampipes.sdk.utils.Assets;
+import org.apache.streampipes.vocabulary.Geo;
+
+public class GeofencingProcessor implements IStreamPipesDataProcessor {
+
+  private static final String LATITUDE_CENTER = "latitude-center";
+  private static final String LONGITUDE_CENTER = "longitude-center";
+
+  private float centerLatitude;
+  private float centerLongitude;
+  private String latitudeFieldName;
+  private String longitudeFieldName;
+
+  private int radius;
+
+  public IDataProcessorConfiguration declareConfig() {
+    return DataProcessorConfiguration.create(
+        GeofencingProcessor::new,
+        ProcessingElementBuilder.create("org.apache.streampipes.tutorial-geofencing")
+            .category(DataProcessorType.ENRICH)
+            .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+            .withLocales(Locales.EN)
+            .requiredStream(StreamRequirementsBuilder
+                .create()
+                .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
+                    Labels.from("latitude-field", "Latitude", "The event "
+                        + "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+                .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
+                    Labels.from("longitude-field", "Longitude", "The event "
+                        + "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+                .build())
+            .outputStrategy(OutputStrategies.keep())
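+            // Static properties: the user-configurable geofence radius and center coordinates,
+            // extracted in onPipelineStarted below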
+            .requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.", 0, 1000, 1)
+            .requiredFloatParameter(Labels.from(LATITUDE_CENTER, "Latitude", "The latitude value"))
+            .requiredFloatParameter(Labels.from(LONGITUDE_CENTER, "Longitude", "The longitude value"))
+            .build()
+    );
+  }
+
+  @Override
+  public void onPipelineStarted(IDataProcessorParameters params,
+                                SpOutputCollector collector,
+                                EventProcessorRuntimeContext runtimeContext) {
+    this.centerLatitude = params.extractor().singleValueParameter(LATITUDE_CENTER, Float.class);
+    this.centerLongitude = params.extractor().singleValueParameter(LONGITUDE_CENTER, Float.class);
+    this.latitudeFieldName = params.extractor().mappingPropertyValue("latitude-field");
+    this.longitudeFieldName = params.extractor().mappingPropertyValue("longitude-field");
+    this.radius = params.extractor().singleValueParameter("radius", Integer.class);
+  }
+
+  @Override
+  public void onEvent(Event event,
+                      SpOutputCollector collector) {
+
+  }
+
+  @Override
+  public void onPipelineStopped() {
+
+  }
+}
+
+```
+
+## Adding an implementation
+
+All that's left now is to add the implementation.
+
+Add the following piece of code to the `onEvent` method, which realizes the geofencing functionality:
+
+```java
+
+  @Override
+  public void onEvent(Event event,
+                      SpOutputCollector collector) {
+    float latitude = event.getFieldBySelector(latitudeFieldName).getAsPrimitive().getAsFloat();
+    float longitude = event.getFieldBySelector(longitudeFieldName).getAsPrimitive().getAsFloat();
+
+    float distance = distFrom(latitude, longitude, centerLatitude, centerLongitude);
+
+    if (distance <= radius) {
+      collector.collect(event);
+    }
+  }
+
+  // Haversine formula: computes the great-circle distance between two coordinates in meters
+  public static float distFrom(float lat1, float lng1, float lat2, float lng2) {
+    double earthRadius = 6371000;
+    double dLat = Math.toRadians(lat2 - lat1);
+    double dLng = Math.toRadians(lng2 - lng1);
+    double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
+        + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
+        * Math.sin(dLng / 2) * Math.sin(dLng / 2);
+
+    double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
+
+    return (float) (earthRadius * c);
+  }
+```
+
+We won't go into details here as this isn't StreamPipes-related code, but in general the method extracts the latitude and
+longitude fields from the input event and calculates the distance between the geofence center and these coordinates.
+If the distance is below the given radius, the event is forwarded to the next pipeline element.
+
+See the [event model](06_extend-sdk-event-model.md) guide to learn how to extract parameters from events.
+
+## Registering the pipeline element
+
+The final step is to register the data processor in the `Init` class. Add the following line to
+the `SpServiceDefinitionBuilder`:
+
+```java
+ .registerPipelineElement(new GeofencingProcessor())
+```
+
+## Starting the service
+
+:::tip
+
+Once you start the service, it will register itself in StreamPipes using an auto-discovered hostname, which should
+work out-of-the-box.
+In some cases, the detected hostname is not resolvable from within a container (where the core is running). In this
+case, provide an `SP_HOST` environment variable to override the auto-discovery.
+
+:::
+
+:::tip
+
+The default port of all pipeline element services as defined in the `create` method is port 8090.
+If you'd like to run multiple services at the same time on your development machine, change the port here.
+As an alternative, you can also provide an environment variable `SP_PORT`, which overrides the port settings. This is
+useful for using different configurations in development and production environments.
+
+:::
+
+Now we are ready to start our service!
+
+Configure your IDE to provide an environment variable called ``SP_DEBUG`` with value ``true`` when starting the project.
+
+Execute the main method in the class `Init` we've just created.
+
+The service automatically registers itself in StreamPipes.
+To install the just created element, open the StreamPipes UI and follow the manual provided in
+the [user guide](03_use-install-pipeline-elements.md).
+
+## Read more
+
+Congratulations! You've just created your first data processor for StreamPipes.
+There are many more things to explore, and data processors can be defined in much more detail using multiple wrappers.
+Follow our [SDK guide](06_extend-sdk-static-properties.md) to see what's possible!
diff --git a/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-data-sinks.md b/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-data-sinks.md
new file mode 100644
index 000000000..09baeff71
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/06_extend-tutorial-data-sinks.md
@@ -0,0 +1,272 @@
+---
+id: extend-tutorial-data-sinks
+title: "Tutorial: Data Sinks"
+sidebar_label: "Tutorial: Data Sinks"
+---
+
+In this tutorial, we will add a new data sink using the standalone wrapper.
+
+From an architectural point of view, we will create a self-contained service that includes the description of the data
+sink and a corresponding implementation.
+
+## Objective
+
+We are going to create a new data sink that calls an external HTTP endpoint to forward data to an external service.
+
+For each incoming event, an external service is invoked using an HTTP POST request. In this example, we'll call an
+endpoint provided by [RequestBin](https://requestbin.com/).
+To set up your own endpoint, go to [https://requestbin.com/](https://requestbin.com/) and click "Create a request bin".
+Copy the URL of the newly created endpoint.
+
+## Project setup
+
+Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project
+skeleton (streampipes-archetype-extensions-jvm).
+Enter the following command in a command line of your choice (Apache Maven needs to be installed):
+
+```
+mvn archetype:generate -DarchetypeGroupId=org.apache.streampipes \
+-DarchetypeArtifactId=streampipes-archetype-extensions-jvm -DarchetypeVersion=0.93.0 \
+-DgroupId=org.streampipes.tutorial -DartifactId=sink-tutorial -DclassNamePrefix=Rest -DpackageName=mypackage
+```
+
+You will see a project structure similar to the structure shown in the [archetypes](06_extend-archetypes.md) section.
+
+:::tip
+
+Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your
+application into a Docker container.
+
+:::
+
+Now you're ready to create your first data sink for StreamPipes!
+
+## Adding data sink requirements
+
+First, we will add a new stream requirement.
+Create a class `RestSink` which should look as follows:
+
+```java
+package org.apache.streampipes.pe.example;
+
+import org.apache.streampipes.extensions.api.pe.IStreamPipesDataSink;
+import org.apache.streampipes.extensions.api.pe.config.IDataSinkConfiguration;
+import org.apache.streampipes.extensions.api.pe.context.EventSinkRuntimeContext;
+import org.apache.streampipes.extensions.api.pe.param.IDataSinkParameters;
+import org.apache.streampipes.model.DataSinkType;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.model.schema.PropertyScope;
+import org.apache.streampipes.sdk.builder.DataSinkBuilder;
+import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.apache.streampipes.sdk.builder.sink.DataSinkConfiguration;
+import org.apache.streampipes.sdk.helpers.EpRequirements;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.Locales;
+import org.apache.streampipes.sdk.utils.Assets;
+
+public class RestSink implements IStreamPipesDataSink {
+
+  @Override
+  public IDataSinkConfiguration declareConfig() {
+    return DataSinkConfiguration.create(
+        RestSink::new,
+        DataSinkBuilder.create("org.apache.streampipes.tutorial.pe.sink.rest")
+            .category(DataSinkType.NOTIFICATION)
+            .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+            .withLocales(Locales.EN)
+            .requiredStream(StreamRequirementsBuilder
+                .create()
+                .requiredPropertyWithNaryMapping(EpRequirements.anyProperty(), Labels.withId(
+                    "fields-to-send"), PropertyScope.NONE)
+                .build())
+            .build()
+    );
+  }
+
+  @Override
+  public void onPipelineStarted(IDataSinkParameters params,
+                                EventSinkRuntimeContext eventSinkRuntimeContext) {
+
+  }
+
+  @Override
+  public void onEvent(Event event) {
+
+  }
+
+  @Override
+  public void onPipelineStopped() {
+
+  }
+}
+
+```
+
+In this class, we need to implement three methods: The `declareConfig` method is used to define abstract stream
+requirements such as event properties that must be present in any input stream that is later connected to the element
+using the StreamPipes UI.
+The second method, `onPipelineStarted`, is called once a pipeline using this sink is started. The third method, `onEvent`, is
+called for every incoming event.
+
+The `DataSinkBuilder` within the ``declareConfig`` method describes the properties of our data sink:
+
+* ``category`` defines a category for this sink.
+* ``withAssets`` denotes that we will provide an external documentation file and an icon, which can be found in
+  the ``resources`` folder.
+* ``withLocales`` defines that we will provide an external language file, also available in the ``resources`` folder.
+* ``requiredStream`` defines requirements any input stream connected to this sink must provide. In this case, we do not
+  have any specific requirements; we just forward all incoming events to the REST sink. However, we want to let the user
+  select a subset of the available fields from the connected input event. This is achieved by defining an n-ary mapping
+  based on the `anyProperty` requirement, which will later render a selection dialog in the pipeline editor.
+
+The ``onPipelineStarted`` method is called when a pipeline containing the sink is started. Once a pipeline is started, we
+would like to extract user-defined parameters.
+In this example, we simply extract the fields selected by users that should be forwarded to the REST sink and store them
+in a class variable.
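+
+Since the sink uses `withLocales(Locales.EN)` together with `Labels.withId("fields-to-send")`, the label and description
+of the mapping property are expected in the `strings.en` resource file. A minimal sketch of what this file could contain
+(the exact wording below is illustrative, not prescribed by the archetype):
+
+```properties
+fields-to-send.title=Fields to send
+fields-to-send.description=The fields of the incoming event that are forwarded to the REST endpoint
+```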
+
+## Pipeline element invocation
+
+Once users start a pipeline that uses our data sink, the `onPipelineStarted` method in our class is called. The
+interface `IDataSinkParameters` includes methods to extract the configuration parameters a user has selected in
+the pipeline editor and information on the actual streams that are connected to the pipeline element.
+
+## Adding an implementation
+
+Now we'll add a proper implementation, i.e., the REST call executed for every incoming event.
+
+Our final class should look as follows:
+
+```java
+package org.apache.streampipes.pe.example;
+
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.dataformat.SpDataFormatDefinition;
+import org.apache.streampipes.dataformat.json.JsonDataFormatDefinition;
+import org.apache.streampipes.extensions.api.pe.IStreamPipesDataSink;
+import org.apache.streampipes.extensions.api.pe.config.IDataSinkConfiguration;
+import org.apache.streampipes.extensions.api.pe.context.EventSinkRuntimeContext;
+import org.apache.streampipes.extensions.api.pe.param.IDataSinkParameters;
+import org.apache.streampipes.model.DataSinkType;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.model.schema.PropertyScope;
+import org.apache.streampipes.sdk.builder.DataSinkBuilder;
+import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.apache.streampipes.sdk.builder.sink.DataSinkConfiguration;
+import org.apache.streampipes.sdk.helpers.EpRequirements;
+import org.apache.streampipes.sdk.helpers.Labels;
+import org.apache.streampipes.sdk.helpers.Locales;
+import org.apache.streampipes.sdk.utils.Assets;
+
+import com.google.common.base.Charsets;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.entity.StringEntity;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+public class RestSink implements IStreamPipesDataSink {
+
+  private static final Logger LOG = LoggerFactory.getLogger(RestSink.class);
+
+  private static final String REST_ENDPOINT_URI = "YOUR_REQUEST_BIN_URL";
+  private List<String> fieldsToSend;
+  private SpDataFormatDefinition dataFormatDefinition;
+
+  @Override
+  public IDataSinkConfiguration declareConfig() {
+    return DataSinkConfiguration.create(
+        RestSink::new,
+        DataSinkBuilder.create("org.apache.streampipes.tutorial.pe.sink.rest")
+            .category(DataSinkType.NOTIFICATION)
+            .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+            .withLocales(Locales.EN)
+            .requiredStream(StreamRequirementsBuilder
+                .create()
+                .requiredPropertyWithNaryMapping(EpRequirements.anyProperty(), Labels.withId(
+                    "fields-to-send"), PropertyScope.NONE)
+                .build())
+            .build()
+    );
+  }
+
+  @Override
+  public void onPipelineStarted(IDataSinkParameters params,
+                                EventSinkRuntimeContext eventSinkRuntimeContext) {
+    this.dataFormatDefinition = new JsonDataFormatDefinition();
+    this.fieldsToSend = params.extractor().mappingPropertyValues("fields-to-send");
+  }
+
+  @Override
+  public void onEvent(Event event) {
+    Map<String, Object> outEventMap = event.getSubset(fieldsToSend).getRaw();
+    try {
+      String json = new String(dataFormatDefinition.fromMap(outEventMap));
+      Request.Post(REST_ENDPOINT_URI).body(new StringEntity(json, Charsets.UTF_8)).execute();
+    } catch (SpRuntimeException e) {
+      LOG.error("Could not parse incoming event");
+    } catch (IOException e) {
+      LOG.error("Could not reach endpoint at {}", REST_ENDPOINT_URI);
+    }
+  }
+
+  @Override
+  public void onPipelineStopped() {
+
+  }
+}
+
+```
+
+The only class variable you need to change right now is `REST_ENDPOINT_URI`. Change this URL to the URL provided by
+your request bin.
+In the ``onEvent`` method, we use a helper method to get a subset of the incoming event.
+Finally, we convert the resulting ``Map`` to a JSON string and call the endpoint.
+
+## Preparing the service
+
+The final step is to register the sink as a pipeline element.
+
+Go to the class `Init` and register the sink:
+
+```java
+.registerPipelineElement(new RestSink())
+```
+
+## Starting the service
+
+:::tip
+
+Once you start the service, it will register itself in StreamPipes using an auto-discovered hostname, which should
+work out-of-the-box.
+In some cases, the detected hostname is not resolvable from within a container (where the core is running). In this
+case, provide an `SP_HOST` environment variable to override the auto-discovery.
+
+:::
+
+:::tip
+
+The default port of all pipeline element services as defined in the `create` method is port 8090.
+If you'd like to run multiple services at the same time on your development machine, change the port here. As an
+alternative, you can also provide an environment variable `SP_PORT`, which overrides the port settings. This is useful
+for using different configurations in development and production environments.
+
+:::
+
+Now we are ready to start our service!
+
+Configure your IDE to provide an environment variable called ``SP_DEBUG`` with value ``true`` when starting the project.
+
+Execute the main method in the class `Init` we've just created. The service automatically registers itself in
+StreamPipes.
+
+To install the created element, open the StreamPipes UI and follow the manual provided in
+the [user guide](03_use-install-pipeline-elements.md).
+
+## Read more
+
+Congratulations! You've just created your first data sink for StreamPipes.
+There are many more things to explore, and data sinks can be defined in much more detail using multiple wrappers.
+Follow our [SDK guide](../dev-guide-sdk-guide-sinks) to see what's possible!
diff --git a/website-v2/versioned_docs/version-0.95.1/07_technicals-architecture.md b/website-v2/versioned_docs/version-0.95.1/07_technicals-architecture.md
new file mode 100644
index 000000000..e62ba4625
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/07_technicals-architecture.md
@@ -0,0 +1,110 @@
+---
+id: technicals-architecture
+title: Architecture
+sidebar_label: Architecture
+---
+
+## Architecture
+
+StreamPipes Architecture
+
+Apache StreamPipes implements a microservice architecture as shown in the figure above.
+
+## StreamPipes Core
+
+The StreamPipes Core is the central component to manage all StreamPipes resources.
+It delegates the management of adapters, pipeline elements, pipelines and functions to registered extensions services (see below) and monitors the execution of extensions.
+The Core also provides internal REST interfaces to communicate with the user interface, as well as public REST interfaces that can be used by external applications and StreamPipes clients.
+
+Configuration and user data are stored in an Apache CouchDB database.
+
+## StreamPipes Extensions
+
+An Apache StreamPipes extensions service is a microservice which contains the implementation of specific adapters, data streams, data processors, data sinks and functions.
+Multiple extension services can be part of a single StreamPipes installation.
+Each service might provide its own set of extensions.
+Extensions services register with the StreamPipes Core at startup. Users are able to install all or a subset of extensions of each service.
+This allows StreamPipes to be extended at runtime by starting a new service with additional extensions.
+
+Extensions can be built using the SDK (see [Extending StreamPipes](06_extend-setup.md)).
+Extensions services can be provided either in Java or in Python.
+
+:::info
+
+As of version 0.93.0, the Python SDK supports functions only. If you would like to develop pipeline elements in Python as well, let us know in a [GitHub Discussions](https://github.com/apache/streampipes/discussions) comment, so that we can better prioritize development.
+
+:::
+
+
+An extensions service interacts with the core by receiving control messages to invoke or detach an extension.
+In addition, the core regularly fetches monitoring and log data from each registered extensions service.
+
+
+## StreamPipes Client
+
+The Apache StreamPipes Client is a lightweight library for Java and Python which can be used to interact with StreamPipes resources programmatically.
+For instance, users can use the client to influence the control flow of pipelines, to download raw data from the data lake APIs, or to realize custom applications with live data.
+
+
+## Third-party systems
+
+In addition to the core components, an Apache StreamPipes installation uses several third-party services, which are part of the standard installation.
+
+* Configurations and user data are stored in an [Apache CouchDB](https://couchdb.apache.org) database.
+* Time-series data is stored in an [InfluxDB](https://github.com/influxdata/influxdb) database.
+* Events are exchanged over a messaging system. Users can choose from various messaging systems that StreamPipes supports. Currently, we support [Apache Kafka](https://kafka.apache.org), [Apache Pulsar](https://pulsar.apache.org), [MQTT](https://mqtt.org/) and [NATS](https://nats.io/). The selection of the right messaging system depends on the use case. See [Messaging](07_technicals-messaging.md) for more information.
+
+:::info
+
+Versions prior to 0.93.0 included Consul for service discovery and registration. Starting from 0.93.0 onwards, we switched to an internal service discovery mechanism.
+
+:::
+
+All mentioned third-party services are part of the default installation and are auto-configured during the installation process.
+
+## Programming Languages
+
+Apache StreamPipes is mainly written in Java.
+Services are based on Spring Boot.
+The included [Python integration](https://streampipes.apache.org/docs/docs/python/latest/) is written in Python.
+
+The user interface is mainly written in TypeScript using the Angular framework.
+
+
+## Data Model
+
+Internally, Apache StreamPipes realizes a stream processing layer where events are continuously exchanged over a messaging system.
+When building a pipeline, data processors consume data from a topic assigned by the core and publish data back to another topic, which is also assigned by the core.
+
+At runtime, events have a flat and easily understandable data structure, consisting of key/value pairs. Events are serialized in JSON, although StreamPipes can be configured to use other (binary) message formats.
+
+This allows for easy integration with other systems which want to consume data from StreamPipes, since an event could look as simple as this:
+
+```json
+{
+  "timestamp": 1234556,
+  "deviceId": "ABC",
+  "temperature": 37.5
+}
+```
+
+However, this wouldn't be very expressive, right?
+To [assist users](07_technicals-user-guidance.md), StreamPipes provides a rich description layer for events. So under the hood, for the `temperature` field shown above, StreamPipes can also store the following:
+
+```json
+{
+  "label": "Temperature",
+  "description": "Measures the temperature during leakage tests",
+  "measurementUnit": "https://qudt.org/vocab/unit/DEG_C",
+  "runtimeName": "temperature",
+  "runtimeType": "xsd:float",
+  "semanticType": "https://my-company-vocabulary/leakage-test-temperature"
+}
+```
+
+By separating the description layer from the runtime representation, we get a good trade-off between expressivity, readability for humans and lightweight runtime message formats.
+The schema is stored in an internal schema registry and available to the client APIs and user interface views to improve validation and user guidance.
+
+StreamPipes also supports arrays and nested structures, although we recommend using flat events where possible to ease integration with downstream systems (such as time-series storage).
+
+
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/07_technicals-messaging.md b/website-v2/versioned_docs/version-0.95.1/07_technicals-messaging.md
new file mode 100644
index 000000000..d5308a6d8
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/07_technicals-messaging.md
@@ -0,0 +1,65 @@
+---
+id: technicals-messaging
+title: Messaging
+sidebar_label: Messaging
+---
+
+## Architecture
+
+To exchange messages at runtime between individual [Extensions Services](07_technicals-architecture.md), StreamPipes uses external messaging systems.
+This corresponds to an event-driven architecture with a central message broker and decoupled services which consume and produce events from the messaging system.
+
+There are many different open source messaging systems on the market, which each have individual strengths.
+To provide a flexible system which matches different needs, StreamPipes can be configured to use various messaging systems.
+
+## Supported messaging systems
+
+The following messaging systems are currently supported:
+
+* Apache Kafka
+* Apache Pulsar
+* MQTT
+* NATS
+
+## Configure StreamPipes to use another messaging system
+
+Configuring StreamPipes for one of these messaging systems is an installation-time configuration.
+We currently do not recommend changing the configuration at runtime.
+
+The protocol can be configured with the environment variable `SP_PRIORITIZED_PROTOCOL` assigned to the core with one of the following values:
+
+```bash
+SP_PRIORITIZED_PROTOCOL=kafka # Use Kafka as protocol
+SP_PRIORITIZED_PROTOCOL=pulsar # Use Pulsar as protocol
+SP_PRIORITIZED_PROTOCOL=mqtt # Use MQTT as protocol
+SP_PRIORITIZED_PROTOCOL=nats # Use NATS as protocol
+```
+
+Note that each extension service can support an arbitrary number of protocols. For instance, you can have a lightweight extension service which only supports NATS, but have another, cloud-centered service which supports Kafka, both registered at the Core.
+When multiple protocols are supported by two connected pipeline elements, StreamPipes selects one based on a priority, which can be configured in the [Configuration View](03_use-configurations.md).
+StreamPipes ensures that only pipeline elements which have a commonly supported protocol can be connected.
+
+Note that you might need to change the installation files. For the `Docker-Compose` based installation, we provide various compose files for different messaging setups.
+For the `Kubernetes` installation, we provide variables which can be set in the helm chart's `values.yaml` file.
+
+### Configure broker addresses
+
+By default, StreamPipes assumes that the messaging system is started from its own environment, e.g., the system configured in the selected `Docker-Compose` file.
+
+Besides that, it is also possible to let StreamPipes connect to an externally provided messaging system. For this purpose, various environment variables exist.
+
+* `SP_PRIORITIZED_PROTOCOL` to set the prioritized protocol to either `kafka`, `mqtt`, `nats` or `pulsar`
+
+* `SP_KAFKA_HOST`, `SP_KAFKA_PORT` to configure Kafka access
+* `SP_MQTT_HOST`, `SP_MQTT_PORT` to configure MQTT access
+* `SP_NATS_HOST`, `SP_NATS_PORT` to configure NATS access
+* `SP_PULSAR_URL` to configure Pulsar access
+
+
+Most settings can also be set in the UI under `Settings->Messaging`.
+
+:::warning Installation-time configurations
+Although it is currently possible to change messaging settings in the user interface, we do not support dynamic modification of messaging systems.
+Choosing a proper system is considered an installation-time setting which should not be changed afterwards.
+Existing adapters and pipeline elements are not properly updated after changes to the messaging layer.
+:::
+
diff --git a/website-v2/versioned_docs/version-0.95.1/07_technicals-runtime-wrappers.md b/website-v2/versioned_docs/version-0.95.1/07_technicals-runtime-wrappers.md
new file mode 100644
index 000000000..9cebadfd2
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/07_technicals-runtime-wrappers.md
@@ -0,0 +1,37 @@
+---
+id: technicals-runtime-wrappers
+title: Runtime Wrappers
+sidebar_label: Runtime Wrappers
+---
+
+## Overview
+
+In general, StreamPipes has an exchangeable runtime layer, i.e., the actual processing of incoming events can be delegated to a third-party stream processing system such as Kafka Streams or Apache Flink.
+
+The default runtime wrapper is the StreamPipes Native Wrapper, called the `StandaloneWrapper`.
+
+Although not recommended for production, we invite interested developers to check out our experimental wrappers:
+
+* Kafka Streams runtime wrapper at [https://github.com/apache/streampipes/tree/dev/streampipes-wrapper-kafka-streams](https://github.com/apache/streampipes/tree/dev/streampipes-wrapper-kafka-streams)
+* Apache Flink runtime wrapper at [https://github.com/apache/streampipes/tree/dev/streampipes-wrapper-flink](https://github.com/apache/streampipes/tree/dev/streampipes-wrapper-flink)
+
+## Assigning a runtime wrapper to an extension service
+
+Runtime wrappers can be assigned in the `Service Definition` of the `Init` class of an extension service:
+
+```java
+
+  @Override
+  public SpServiceDefinition provideServiceDefinition() {
+    return SpServiceDefinitionBuilder.create("org.apache.streampipes.extensions.all.jvm",
+            "StreamPipes Extensions (JVM)",
+            "", 8090)
+        ...
+        .registerRuntimeProvider(new StandaloneStreamPipesRuntimeProvider())
+        ...
+        .build();
+  }
+
+```
+
+Please let us know through our communication channels if you are interested in this feature and if you are willing to contribute!
diff --git a/website-v2/versioned_docs/version-0.95.1/07_technicals-user-guidance.md b/website-v2/versioned_docs/version-0.95.1/07_technicals-user-guidance.md
new file mode 100644
index 000000000..697411861
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/07_technicals-user-guidance.md
@@ -0,0 +1,7 @@
+---
+id: technicals-user-guidance
+title: User Guidance
+sidebar_label: User Guidance
+---
+
+tbd
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/08_debugging.md b/website-v2/versioned_docs/version-0.95.1/08_debugging.md
new file mode 100644
index 000000000..95892c175
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/08_debugging.md
@@ -0,0 +1,7 @@
+---
+id: debugging-debugging
+title: Debugging
+sidebar_label: Debugging
+---
+
+tbd
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/08_monitoring.md b/website-v2/versioned_docs/version-0.95.1/08_monitoring.md
new file mode 100644
index 000000000..6680b5d86
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/08_monitoring.md
@@ -0,0 +1,7 @@
+---
+id: debugging-monitoring
+title: Monitoring
+sidebar_label: Monitoring
+---
+
+tbd
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/09_contribute.md b/website-v2/versioned_docs/version-0.95.1/09_contribute.md
new file mode 100644
index 000000000..119568929
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/09_contribute.md
@@ -0,0 +1,17 @@
+---
+id: community-contribute
+title: Contribute
+sidebar_label: Contribute
+---
+
+## Contribute
+
+We welcome contributions to StreamPipes. If you are interested in contributing to StreamPipes, let us know! You'll
+get to know an open-minded and motivated team working together to build the next IIoT analytics toolbox.
+
+Here are some first steps in case you want to contribute:
+* Subscribe to our dev mailing list [dev-subscribe@streampipes.apache.org](mailto:dev-subscribe@streampipes.apache.org)
+* Send an email, tell us about your interests and which parts of StreamPipes you'd like to contribute to (e.g., core or UI)!
+* Ask for a mentor who helps you to understand the code base and guides you through the first setup steps
+* Find an issue on [GitHub](https://github.com/apache/streampipes/issues) which is tagged with a _good first issue_ tag
+* Have a look at our **developer wiki** at [https://cwiki.apache.org/confluence/display/STREAMPIPES](https://cwiki.apache.org/confluence/display/STREAMPIPES) to learn more about StreamPipes development.
diff --git a/website-v2/versioned_docs/version-0.95.1/09_get-help.md b/website-v2/versioned_docs/version-0.95.1/09_get-help.md
new file mode 100644
index 000000000..077f0b62f
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/09_get-help.md
@@ -0,0 +1,25 @@
+---
+id: community-get-help
+title: Get Help
+sidebar_label: Get Help
+---
+
+The Apache StreamPipes community is happy to help with any questions or problems you might have.
+
+## Questions
+Subscribe to our user mailing list to ask a question.
+
+[Mailing Lists](https://streampipes.apache.org/mailinglists.html)
+
+To subscribe to the user list, send an email to [users-subscribe@streampipes.apache.org](mailto:users-subscribe@streampipes.apache.org)
+
+You can also ask questions on our GitHub Discussions page:
+[GitHub Discussions](https://github.com/apache/streampipes/discussions)
+
+## Bugs and Feature Requests
+
+If you've found a bug or have a feature that you'd love to see in StreamPipes, feel free to create an issue on [GitHub](https://github.com/apache/streampipes/issues)
+or [discuss your ideas](https://github.com/apache/streampipes/discussions/categories/ideas).
+
+
+
diff --git a/website-v2/versioned_docs/version-0.95.1/faq-common-problems.md b/website-v2/versioned_docs/version-0.95.1/faq-common-problems.md
new file mode 100644
index 000000000..14195c0f0
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/faq-common-problems.md
@@ -0,0 +1,73 @@
+---
+id: faq-common-problems
+title: Common Problems
+sidebar_label: Common Problems
+---
+
+* Windows 10: Consul, Kafka, Zookeeper, or Kafka-Rest did not start
+* Linux / OSX: Consul does not start
+* Run StreamPipes in a VM in Windows
+* Only a few processors are available in the pipeline editor
+* No data is shown in the live dashboard
+* Windows 10: Should I use Windows containers or Docker containers?
+* Configurations are not deleted
+* Help us to improve StreamPipes and this documentation
+* Docker Network already used
+
+## Windows 10: Consul, Kafka, Zookeeper, or Kafka-Rest did not start
+**Problem:** You get an error message similar to: `ERROR: for consul Cannot start service consul: b'driver failed programming external connectivity on endpoint sp-test_consul_1 (eae0457fc03c1364b8e81a6e155ca4b95ee1e1d01bb3c1aa9dd5192bdcb7b91a): Error starting userland proxy: mkdir /port/tcp:0.0.0.0:8600:tcp:172.30.0.9:8600: input/output error`
+
+**Solution:** To resolve this problem, stop StreamPipes with `streampipes stop` and restart Docker via the Docker settings in the task bar.
+After Docker has restarted, run `streampipes start`.
+
+## Consul does not start
+**Problem:** After starting StreamPipes with `streampipes start`, there is an error with Consul.
+
+**Solution:** To resolve this, execute `streampipes stop`, wait a minute and start it again with `streampipes start`. If you've installed an old version of StreamPipes (before the installer was available), make sure that no network suffixed with `spnet` exists in Docker. Type `docker network ls` to check and `docker network rm NETWORK_NAME` to remove the existing network before running the installer.
+
+## Run StreamPipes in a VM in Windows
+**Problem:** StreamPipes does not work properly with Docker under Windows 8 or earlier versions.
+
+**Solution:** We do support virtual machines (VMs), but if you run them under Windows, there might be problems with Docker and its network configurations.
+Please use Windows 10, OSX or Linux.
+You can also use a VM from a cloud provider to test StreamPipes.
+
+
+## Only a few processors are available in the pipeline editor
+**Problem:** In the Pipeline Editor, only a few processors can be used in pipelines.
+
+**Solution:** In the demo/desktop version, we only integrated a few processors. To ensure that you can easily try out StreamPipes (even on your laptop),
+ we tried to make it as lightweight as possible. If you are interested in more sophisticated algorithms, please contact us.
+
+
+## No data is shown in the live dashboard
+**Problem:** The live dashboard does not show any data.
+
+**Solution:** If this is the case, your IP is probably configured wrong.
+You can reinstall the system by running `streampipes clean` and then `streampipes start` again.
+This will delete all StreamPipes configurations. StreamPipes is designed as a server application and requires a fixed IP.
+We created a version to easily run it on your laptop and test it, but on your laptop you usually get a new IP when you change the network.
+This problem only occurs in testing scenarios; in production scenarios, the IP can also be changed manually without data loss.
+
+## Windows 10: Should I use Windows containers or Docker containers?
+**Problem:** StreamPipes does not work with Windows 10.
+
+**Solution:** You should use Docker containers. Go to the Docker settings in your task bar and select 'Switch to Docker containers'.
+
+## Configurations are not deleted
+**Problem:** The configurations are not deleted from the host system. Even after manually removing the 'config/' folder, StreamPipes settings are not deleted.
+The Consul settings are also still there.
+
+**Solution:** Probably Docker did not mount a volume in the 'config/' folder. You must delete the anonymous Docker volumes manually. See the Docker [documentation](https://docs.docker.com/engine/reference/commandline/volume_rm/).
+
+
+## Docker Network already used
+**Problem:** When starting StreamPipes, the error message "Creating network 'streampipes-cli_spnet' with driver 'bridge' Pool overlaps with other one on this address space" is shown.
+
+**Solution:** Delete old networks, for example with `docker network prune`.
+
+## Help us to improve StreamPipes and this documentation
+Help us to improve this section.
+If you have any problems with the system or with the documentation, do not hesitate to contact us.
+Our goal is to continuously improve StreamPipes.
+Your help and feedback are welcome.
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.adapters.image.stream.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.adapters.image.stream.md
new file mode 100644
index 000000000..a03806d9e
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.adapters.image.stream.md
@@ -0,0 +1,38 @@
+---
+id: org.apache.streampipes.connect.adapters.image.stream
+title: Image Upload (Stream)
+sidebar_label: Image Upload (Stream)
+---
+
+
+
+

+ +

+ +*** + +## Description + +Upload a zip file of images and create an event per image + +*** + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.adapters.iss.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.adapters.iss.md new file mode 100644 index 000000000..ab27d577d --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.adapters.iss.md @@ -0,0 +1,39 @@ +--- +id: org.apache.streampipes.connect.adapters.iss +title: ISS Location +sidebar_label: ISS Location +--- + + + + + +

+ +

+ +*** + +## Description + +Shows the live position of the International Space Station (ISS), updated every two seconds. + + +*** + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.influxdb.stream.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.influxdb.stream.md new file mode 100644 index 000000000..d01a6ac50 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.influxdb.stream.md @@ -0,0 +1,41 @@ +--- +id: org.apache.streampipes.connect.iiot.adapters.influxdb.stream +title: InfluxDB Stream Adapter +sidebar_label: InfluxDB Stream Adapter +--- + + + + + +

+ +

+ +*** + +## Description +Creates a data stream for an InfluxDB measurement. + +*** + +## Configuration + + + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.iolink.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.iolink.md new file mode 100644 index 000000000..38363c96e --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.iolink.md @@ -0,0 +1,90 @@ +--- +id: org.apache.streampipes.connect.iiot.adapters.iolink +title: ifm IOLink +sidebar_label: ifm IOLink +--- + + + + + +

+ +

+ +*** + +## Description + +This adapter enables the integration of IO-Link sensor data produced by an ifm IO-Link Master +(e.g., AL1350) with Apache StreamPipes. To use this adapter, you need to configure your IO-Link +master to publish events to an MQTT broker. This can be achieved through a REST interface or via +the browser at `http://##IP_OF_IO_LINK_MASTER##/web/subscribe`. For detailed instructions, +please refer to the ifm documentation. + +### Requirements +The JSON events should include the following information: +- `deviceinfo.serialnumber` +- Only the pdin value is required for each port (e.g., `port[0]`). +- The event `timer[1].datachanged` can be used as a trigger. +Using this adapter, you can create a stream for sensors of the same type. + +### Restrictions +This version supports a single IO-Link master. If you want to connect multiple masters, they must have the same setup. +If you have different requirements, please inform us through the mailing list or GitHub discussions. + +*** + +## Configuration + +Here is a list of the configuration parameters you must provide. + +### Broker URL + +Enter the URL of the broker, including the protocol (e.g. `tcp://10.20.10.3:1883`) + +### Access Mode + +If necessary, provide broker credentials. + +### Ports + +Select the ports that are connected to the IO-Link sensors. + +### Sensor Type + +Choose the type of sensor you want to connect. (**IMPORTANT:** Currently, only the VVB001 is supported) + +## Output + +The output includes all values from the selected sensor type. Here is an example for the `VVB001 sensor`: +``` +{ + "aPeak": 6.6, + "aRms": 1.8, + "crest": 3.7, + "out1": true, + "out2": true, + "port": "000000001234", + "status": 0, + "temperature": 22, + "timestamp": 1685525380729, + "vRms": 0.0023 +} +``` diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.netio.mqtt.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.netio.mqtt.md new file mode 100644 index 000000000..80cb04156 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.netio.mqtt.md @@ -0,0 +1,64 @@ +--- +id: org.apache.streampipes.connect.iiot.adapters.netio.mqtt +title: NETIO MQTT M2M +sidebar_label: NETIO MQTT M2M +--- + + + + + +

+ +

+
+
+***
+
+## Description
+
+Consumes live power measurements from NETIO smart power sockets via the NETIO MQTT M2M API.
+
+
+***
+
+## Configuration
+
+To use this adapter, configure your NETIO device to publish its measurements to an MQTT broker and provide the
+connection details (broker address, port and topic) of this broker.
+
+
+## Output
+
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.netio.rest.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.netio.rest.md
new file mode 100644
index 000000000..83c741ecc
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.netio.rest.md
@@ -0,0 +1,64 @@
+---
+id: org.apache.streampipes.connect.iiot.adapters.netio.rest
+title: NETIO http JSON
+sidebar_label: NETIO http JSON
+---
+
+
+
+

+ +

+
+
+***
+
+## Description
+
+Polls live power measurements from NETIO smart power sockets via the NETIO JSON HTTP API.
+
+
+***
+
+## Configuration
+
+To use this adapter, provide the connection details (address and credentials) of your NETIO device, which must have
+the JSON API enabled.
+
+
+## Output
+
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.oi4.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.oi4.md
new file mode 100644
index 000000000..1621d8702
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.oi4.md
@@ -0,0 +1,88 @@
+---
+id: org.apache.streampipes.connect.iiot.adapters.oi4
+title: OI4
+sidebar_label: OI4
+---
+
+
+# Open Industry 4.0 (OI4)
+

+ +

+
+---
+
+
+
+The OI4 adapter facilitates the integration of any OT-device compliant with the OI4 standard into Apache StreamPipes.
+For detailed information about this standard, please refer to their [development guide](https://openindustry4.com/fileadmin/Dateien/Downloads/OEC_Development_Guideline_V1.1.1.pdf).
+
+### Requirements
+
+Your OI4-compatible device should emit data via an MQTT broker.
+
+### Restrictions
+
+This adapter exclusively allows data consumption from a specific MQTT topic.
+If you have different requirements, please notify us through the mailing list or GitHub discussions.
+
+---
+
+## Configuration
+
+Below is a list of the configuration parameters you need to provide.
+
+### Broker URL
+
+Enter the URL of the broker, including the protocol and port number (e.g., `tcp://10.20.10.3:1883`).
+
+### Access Mode
+
+Choose between unauthenticated access or input your credentials for authenticated access.
+
+### Sensor Description
+
+You should provide information about the sensor you want to connect to. This can be achieved in two ways:
+
+a) **By Type**: Specify the type of sensor you want to connect to, e.g., `'VVB001'`. <br/>
+b) **By IODD**: Simply upload the IODD description of the respective sensor. Please note: This feature is not yet available! If you're interested in this feature, please notify us through the mailing list or GitHub discussions and share your use case with us.
+
+### Selected Sensors
+
+Configure which sensors of the master device you want to connect to. You can either select `All`, which will provide data from all sensors available on the respective MQTT topic, or choose `Custom Selection` and provide a list of sensor IDs in a comma-separated string (e.g., `000008740649,000008740672`).
+
+## Output
+
+The output consists of all values from the selected sensor type. Below is an example for the `VVB001` sensor:
+
+```json
+{
+  "a-Rms": 1.8,
+  "OUT2": true,
+  "SensorID": "000008740649",
+  "Temperature": 22,
+  "Crest": 3.7,
+  "v-Rms": 0.0023,
+  "OUT1": true,
+  "Device status": 0,
+  "timestamp": 1685525380729
+}
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.opcua.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.opcua.md
new file mode 100644
index 000000000..76a65ca55
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.opcua.md
@@ -0,0 +1,76 @@
+---
+id: org.apache.streampipes.connect.iiot.adapters.opcua
+title: OPC UA
+sidebar_label: OPC UA
+---
+
+
+
+

+ +

+ +*** + +## Description + +Reads values from an OPC-UA server repeatedly + +*** + +## Required Input + +*** + +## Configuration + +### Polling Interval + +Duration of the polling interval in seconds + +### Anonymous vs. Username/Password + +Choose whether you want to connect anonymously or authenticate using your credentials. + +     **Anonymous**: No further information required
+     **Username/Password**: Insert your `username` and `password` to access the OPC UA server
+
+### OPC UA Server
+
+Where can the OPC UA server be found?
+
+     **URL**: Specify the server's full `URL` (including port), can be with or without leading `opc.tcp://`
+     **Host/Port**: Insert the `host` address (with or without leading `opc.tcp://`) and the `port`
+### Namespace Index
+
+Requires the index of the namespace you want to connect to.
+
+### Node ID
+
+The identifier of the node you want to read from; numbers and strings are both valid.
+
+### Available Nodes
+
+Shows all available nodes once the namespace index and node ID are given.
+Select as many as you like to query.
+
+***
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.plc4x.modbus.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.plc4x.modbus.md
new file mode 100644
index 000000000..4239ba8b3
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.plc4x.modbus.md
@@ -0,0 +1,75 @@
+---
+id: org.apache.streampipes.connect.iiot.adapters.plc4x.modbus
+title: PLC4X MODBUS
+sidebar_label: PLC4X MODBUS
+---
+
+
+
+

+ +

+
+
+***
+
+## Description
+
+The Modbus adapter allows connecting to a PLC using the Modbus specification.
+
+***
+
+## Configuration
+
+The following configuration options are available when creating the adapter:
+
+### PLC Address
+
+The IP address of the Modbus device without any prefix, which will be added automatically when creating the adapter.
+
+### PLC Port
+
+The PLC port refers to the port of the PLC, such as 502.
+
+### Node ID
+
+The Node ID refers to the ID of the specific device.
+
+### Nodes
+
+The `Nodes` section requires configuration options for the individual nodes.
+Nodes can either be imported from a comma-separated CSV file, or can be directly assigned in the configuration menu.
+
+The following fields must be provided for each node:
+
+* Runtime Name: Refers to the field to internally identify the node, e.g., in the data explorer or pipeline editor.
+* Node Address: Refers to the address of the node in Modbus, e.g., 1
+* Object Type: Can be selected from the available options `DiscreteInput`, `Coil`, `InputRegister`,
+  or `HoldingRegister`.
+
+An example CSV file looks as follows:
+
+```
+Runtime Name,Node Address,Object Type
+field1,1,Coil
+temperature,2,Coil
+```
+
+Note that the CSV header must exactly match the titles `Runtime Name`, `Node Address` and `Object Type`.
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.plc4x.s7.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.plc4x.s7.md
new file mode 100644
index 000000000..9e22be6df
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.plc4x.s7.md
@@ -0,0 +1,96 @@
+---
+id: org.apache.streampipes.connect.iiot.adapters.plc4x.s7
+title: PLC4X S7
+sidebar_label: PLC4X S7
+---
+
+
+
+

+ +

+
+
+***
+
+## Description
+
+The adapter allows connecting to a Siemens S7 PLC.
+
+***
+
+## Configuration
+
+The following configuration options are available when creating an adapter:
+
+### PLC Address
+
+This field requires the PLC address in the form of the IP without the prefixed protocol (e.g., 192.168.34.56).
+
+In addition to the pure IP, other parameters supported by Apache PLC4X can be provided as URL parameters:
+
+* `local-rack`
+* `local-slot`
+* `local-tsap`
+* `remote-rack`
+* `remote-slot`
+
+Additional configs are separated by `&`.
+
+Example address: `192.68.34.56?remote-rack=0&remote-slot=3&controller-type=S7_400`
+
+See the Apache PLC4X documentation for more information.
+
+### Polling Interval
+
+The polling interval requires a number in milliseconds, which represents the interval in which the adapter will poll the
+PLC for new data. For instance, a polling interval of 1000 milliseconds will configure the adapter to send a request to
+the PLC every second.
+
+### Nodes
+
+In the Nodes section, the PLC nodes that should be gathered are defined.
+There are two options to define the nodes:
+
+* Manual configuration: The address must be assigned manually by providing a runtime name, the node name and the
+  datatype. The `Runtime Name` will be the StreamPipes-internal name of the field, which will also show up in the data
+  explorer and pipeline editor. The `Node Name` refers to the node address of the PLC, e.g., `%Q0.4`. Finally, the data
+  type can be selected from the available selection. Currently available data types
+  are `Bool`, `Byte`, `Int`, `Word`, `Real`, `Char`, `String`, `Date`, `Time of Day` and `Date and Time`.
+* CSV upload: Instead of providing the node information manually, a CSV file can be uploaded. The CSV file can, for instance, be
+  exported from TIA and then be enriched with the appropriate runtime names. This is especially useful when many fields
+  should be added as nodes. Here is an example export enriched with the runtime name:
+
+```
+Runtime Name,Path,Data Type,Node Name
+I_High_sensor,Tag table_1,Bool,%I0.0,
+I_Low_sensor,Tag table_1,Bool,%I0.1,
+I_Pallet_sensor,Tag table_1,Bool,%I0.2,
+I_Loaded,Tag table_1,Bool,%I0.3,
+```
+
+Note that the CSV can contain additional columns, but only the columns `Runtime Name`, `Data Type` and `Node Name` are
+used, while all other columns will be ignored.
+
+## Best Practices
+
+Instead of creating a large event containing all nodes that should be available in StreamPipes, consider grouping the
+fields logically into smaller adapters.
+This will ease the definition of pipelines for users and ease future modifications.
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.ros.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.ros.md
new file mode 100644
index 000000000..aeac39947
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.ros.md
@@ -0,0 +1,64 @@
+---
+id: org.apache.streampipes.connect.iiot.adapters.ros
+title: ROS Bridge
+sidebar_label: ROS Bridge
+---
+
+
+
+

+ +

+ +*** + +## Description + +Connects to robots running on ROS via a ROS Bridge. + + +*** + +## Required input + +A running ROS Bridge instance that is reachable from StreamPipes. + +*** + +## Configuration + +The following configuration parameters are required: + +### Ros Bridge + +The hostname of the ROS Bridge, e.g., `test-server.com` (without protocol). + +### Port + +The port of the ROS instance. + +### Topic + +The ROS topic to subscribe to, e.g., `/battery` (must start with `/`). + + +## Output + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.simulator.machine.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.simulator.machine.md new file mode 100644 index 000000000..be0b3bf20 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.adapters.simulator.machine.md @@ -0,0 +1,92 @@ +--- +id: org.apache.streampipes.connect.iiot.adapters.simulator.machine +title: Machine Data Simulator +sidebar_label: Machine Data Simulator +--- + + + + + +

+ +

+ +*** + +## Description + +This adapter publishes simulated machine sensor data at a configurable time interval. It is ideal for exploring the +capabilities of StreamPipes without needing your own data or for testing purposes. Three different sensor scenarios are +available: + +* Flowrate +* Pressure +* Water Level + +All scenarios include an error or anomaly condition, making them suitable for trend detection, anomaly detection, and +similar applications. + +### Flowrate Sensor + +This scenario simulates a flowrate sensor in a piping system, including a sensor defect situation. The generated data +stream includes: + +- **Sensor ID**: The identifier or name of the sensor, such as `sensor01`. +- **Mass Flow**: Numeric value denoting the current mass flow in the sensor, ranging from 0 to 10. +- **Volume Flow**: Numeric value denoting the current volume flow, ranging from 0 to 10. +- **Temperature**: Numeric value denoting the current temperature in degrees Celsius, ranging from 40 to 100. +- **Density**: Numeric value denoting the current density of the fluid, ranging from 40 to 50. +- **Sensor Fault Flags**: Boolean indicator of sensor issues. + +The sensor defect scenario is as follows: Normally, temperature values range between 40 and 50 degrees Celsius. After +thirty seconds, the simulation switches to defect mode for another thirty seconds, with temperatures ranging from 80 to +100 degrees Celsius and `Sensor Fault Flags` set to `true`. + +### Pressure Sensor + +This scenario simulates a pressure sensor in a gas tank, including an anomaly situation. The generated data stream +includes: + +- **Sensor ID**: The identifier or name of the sensor, such as `sensor01`. +- **Pressure**: Numeric value denoting the current pressure in the tank, ranging from 10 to 70. + +The anomaly scenario is as follows: Normally, pressure values range between 10 and 40. After thirty seconds, the +simulation switches to anomaly mode for another thirty seconds, with pressure values ranging from 40 to 70. + +### Water Level Sensor + +This scenario simulates a sensor in a water tank, including an overflow situation. The generated data stream includes: + +- **Sensor ID**: The identifier or name of the sensor, such as `sensor01`. +- **Level**: Numeric value denoting the current water level in the tank, ranging from 20 to 80. +- **Overflow**: Boolean indicator of tank overflow. + +The overflow scenario is as follows: Normally, level values range between 20 and 30. After thirty seconds, the +simulation switches to overflow mode for another thirty seconds, with level values ranging from 60 to 80 and `Overflow` +set to `true`. + +## Configuration + +When creating the adapter, the following parameters can be configured: + +- **Wait Time**: The time in milliseconds between two sensor events. Defaults to 1000 (1 second). +- **Sensor**: Select one of the sensor scenarios described above: `flowrate`, `pressure`, `waterlevel`. + +*** \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.file.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.file.md new file mode 100644 index 000000000..79efd7ca3 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.file.md @@ -0,0 +1,90 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.file +title: File Stream +sidebar_label: File Stream +--- + + + + + +

+ +

+ +*** + +## Description + +The File Stream Adapter enables continuous streaming of file contents to Apache StreamPipes, creating a data stream for utilization within StreamPipes. It's particularly handy when you prefer not to connect directly to the data source via StreamPipes or for testing and demonstration purposes. Currently, it supports the following file types: + +- CSV +- JSON +- XML + +### Example + +Suppose we have a CSV file (`temperature.csv`) containing data from a temperature sensor recording data every second: + +```text +time,temperature +1715593295000,36.3 +1715593296000,37.5 +1715593297000,37.0 +1715593298000,37.2 +1715593299000,37.2 +1715593300000,37.6 +1715593301000,37.4 +1715593302000,37.5 +1715593303000,37.5 +1715593304000,37.7 +``` + +When creating a new File Stream Adapter: +- Upload the file +- Select `yes` for `Replay Once` +- Choose `CSV` as the `Format` with `,` as the `delimiter`, check `Header` + +After creating the adapter, it will output one line of the CSV as an event every second. +Further details on configuration options are provided below. + +--- + +## Configuration + +### File + +This section determines the file to be streamed by the adapter. Options include: + +- `Choose existing file`: Select from files already present in StreamPipes. +- `Upload new file`: Upload a new file, also available for other adapters. Supports `.csv`, `.json`, and `.xml` file types. + +### Overwrite file time +Enable this option to always pass the current system time as the timestamp when emitting an event. If your file lacks timestamp information, this should be enabled. Conversely, if your file has timestamp information, enabling this option will overwrite it with the current system time. By default, this option is disabled, leaving timestamp information unaffected. + +### Replay Once +Distinguishes between replaying all data contained in the file only once or in a loop until the adapter is manually stopped. +If the file is replayed in a loop, events from the file are emitted multiple times. In this case, it is recommended to enable `Overwrite file time` if the resulting stream is to be persisted in StreamPipes, otherwise existing events with the same timestamp will be overwritten. + +### Replay Speed + +Configures the event frequency: +- **Keep original time**: Events are emitted based on the timestamp information in the file. +- **Fastest**: All data in the file is replayed as quickly as possible, with no waiting time. +- **Speed Up Factor**: Adjusts the waiting time of the adapter based on the provided speed up factor, considering the time between two events in the file (see the sketch below). diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.http.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.http.md new file mode 100644 index 000000000..e24df3a09 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.http.md @@ -0,0 +1,38 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.http +title: HTTP Stream +sidebar_label: HTTP Stream +--- + + + + + +
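The replay-speed options above boil down to a simple wait-time computation between two consecutive events. The following sketch illustrates one plausible reading of those semantics; it is an assumption-based illustration, not the adapter's actual code.

```java
// Sketch of how the replay-speed options can translate into a wait time
// between two consecutive events with timestamps t1 and t2 (milliseconds).
public class ReplaySpeed {

  enum Mode { KEEP_ORIGINAL_TIME, FASTEST, SPEED_UP_FACTOR }

  static long waitTimeMs(Mode mode, long t1, long t2, double speedUpFactor) {
    long delta = Math.max(0, t2 - t1);
    switch (mode) {
      case KEEP_ORIGINAL_TIME: return delta;                           // original spacing
      case FASTEST:            return 0;                               // no waiting time
      case SPEED_UP_FACTOR:    return (long) (delta / speedUpFactor);  // scaled spacing
      default: throw new IllegalStateException();
    }
  }

  public static void main(String[] args) {
    // Two events one second apart, replayed with a speed-up factor of 4
    System.out.println(waitTimeMs(Mode.SPEED_UP_FACTOR, 1715593295000L, 1715593296000L, 4.0)); // 250
  }
}
```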

+ +

+ +*** + +## Description + +Continuously fetches events from an HTTP REST endpoint. + +*** + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.httpserver.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.httpserver.md new file mode 100644 index 000000000..366b7ccce --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.httpserver.md @@ -0,0 +1,51 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.httpserver +title: HTTP Server +sidebar_label: HTTP Server +--- + + + + + +

+ +

+ +*** + +## Description + +This adapter provides an HTTP endpoint for ingesting events. +Data sent to this endpoint via POST requests is transformed into StreamPipes events. + +### Configuration + +#### Endpoint Appendix + Specify the name of the endpoint resource. The endpoint can be accessed at {host of StreamPipes UI}/endpoints/{endpointName}. A sketch of posting an event to such an endpoint is shown below. + +#### Configuration +##### Manual +Provides an option to define the event schema manually. + +##### Import from file +Use a file with example data to automatically infer an initial event schema. + +*** + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.kafka.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.kafka.md new file mode 100644 index 000000000..d2b380f23 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.kafka.md @@ -0,0 +1,38 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.kafka +title: Apache Kafka +sidebar_label: Apache Kafka +--- + + + + + +
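To illustrate posting an event to the endpoint described above, here is a sketch using the standard JDK HTTP client. The host, endpoint name, and payload fields are placeholder assumptions.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

// Sends one JSON event to the HTTP server adapter's endpoint via POST.
public class SendEventExample {

  public static void main(String[] args) throws Exception {
    String json = "{\"timestamp\": 1715593295000, \"temperature\": 36.3}";

    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("http://streampipes-host/endpoints/my-endpoint")) // placeholder host/endpoint
        .header("Content-Type", "application/json")
        .POST(HttpRequest.BodyPublishers.ofString(json))
        .build();

    HttpResponse<String> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.ofString());
    System.out.println(response.statusCode());
  }
}
```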

+ +

+ +*** + +## Description + +Consumes messages from an Apache Kafka broker + +*** + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.mqtt.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.mqtt.md new file mode 100644 index 000000000..d3f374c07 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.mqtt.md @@ -0,0 +1,53 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.mqtt +title: MQTT +sidebar_label: MQTT +--- + + + + + +

+ +

+ +*** + +## Description + +Consumes messages from a broker using the MQTT protocol + + +*** + +## Configuration + +The following configuration parameters are available: + +### Broker Url + +The URL of the MQTT broker, e.g., `tcp://test-server.com:1883` (protocol and port are required). + +### Access Mode + +Unauthenticated or authenticated (username/password). + +## Output + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.nats.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.nats.md new file mode 100644 index 000000000..d3f374c07 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.nats.md @@ -0,0 +1,53 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.mqtt +title: MQTT +sidebar_label: MQTT +--- + + + + + +

+ +

+ +*** + +## Description + +Consumes events from a NATS broker. + +*** + +## Configuration + +### NATS Subject + +The subject (topic) from which events should be received. Currently, when using wildcard subjects, all messages need to have the same format. + +### NATS Broker URL + +The URL to connect to the NATS broker. Multiple URLs can be provided, separated by commas (,), e.g., `nats://localhost:4222,nats://localhost:4223`. + +### Username + +The username to authenticate the client with the NATS broker. + +This configuration is optional. + +### Password + +The password to authenticate the client with the NATS broker. + +This configuration is optional. + +### NATS Connection Properties + +Any other connection configuration that the NATS client can be created with. +These can be provided as key-value pairs separated by colons (:) and commas (,), +e.g., `io.nats.client.reconnect.max:1, io.nats.client.timeout:1000` (see the sketch below). + +This configuration is optional. + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.pulsar.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.pulsar.md new file mode 100644 index 000000000..f9adc56ce --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.pulsar.md @@ -0,0 +1,38 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.pulsar +title: Apache Pulsar +sidebar_label: Apache Pulsar +--- + + + + + +
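The connection-properties format described above (key-value pairs separated by colons and commas) can be parsed with plain string handling. The following is a sketch under that assumption, not the adapter's actual code.

```java
import java.util.Properties;

// Turns "key:value, key:value" pairs into a java.util.Properties object.
public class NatsConnectionProperties {

  static Properties parse(String config) {
    Properties props = new Properties();
    if (config == null || config.isBlank()) {
      return props;
    }
    for (String pair : config.split(",")) {
      String[] kv = pair.split(":", 2); // split on the first colon only
      if (kv.length == 2) {
        props.setProperty(kv[0].trim(), kv[1].trim());
      }
    }
    return props;
  }

  public static void main(String[] args) {
    Properties p = parse("io.nats.client.reconnect.max:1, io.nats.client.timeout:1000");
    System.out.println(p.getProperty("io.nats.client.timeout")); // 1000
  }
}
```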

+ +

+ +*** + +## Description + +Consumes messages from an Apache Pulsar broker + +*** + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.rocketmq.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.rocketmq.md new file mode 100644 index 000000000..d6c5cb32d --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.rocketmq.md @@ -0,0 +1,38 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.rocketmq +title: Apache RocketMQ +sidebar_label: Apache RocketMQ +--- + + + + + +

+ +

+ +*** + +## Description + +Consumes messages from an Apache RocketMQ broker + +*** + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.tubemq.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.tubemq.md new file mode 100644 index 000000000..0f321339c --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connect.iiot.protocol.stream.tubemq.md @@ -0,0 +1,54 @@ +--- +id: org.apache.streampipes.connect.iiot.protocol.stream.tubemq +title: Apache TubeMQ (InLong) +sidebar_label: Apache TubeMQ (InLong) +--- + + + + + +

+ +

+ +*** + +## Description + +Consumes messages from an Apache TubeMQ broker. + +*** + +## Configuration + +### TubeMQ Master Information + +This field describes the endpoints of all the TubeMQ masters. + +The format should be like `ip1:port1,ip2:port2,ip3:port3`. + +### TubeMQ Topic + +The topic from which events should be consumed. + +### TubeMQ Consumer Group + +The consumer group of the TubeMQ Consumer. + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connectors.ros.sink.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connectors.ros.sink.md new file mode 100644 index 000000000..c8416d3ae --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.connectors.ros.sink.md @@ -0,0 +1,60 @@ +--- +id: org.apache.streampipes.connectors.ros.sink +title: ROS Bridge Sink +sidebar_label: ROS Bridge Sink +--- + + + + + +

+ +

+ +*** + +## Description + +This data sink publishes events to ROS via websocket. + + +*** + +## Required input + +User-defined parameters include the hostname of the ROS bridge, the port of the ROS instance, and the topic. + +## Configuration + +The following configuration parameters are required: + +### Ros Bridge + +The hostname of the ROS Bridge, e.g., `test-server.com` (without protocol). + +### Port + +The port of the ROS instance. + +### Topic + +The ROS topic to publish to, e.g., `/battery` (must start with `/`). + + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification.md new file mode 100644 index 000000000..fb42a1a00 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification.md @@ -0,0 +1,52 @@ +--- +id: org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification +title: Generic Image Classification +sidebar_label: Generic Image Classification +--- + + + +

+ +

+ +*** + +## Description + +Classifies images using a generic, pre-trained classification model. + +*** + +## Required input + +Input events must contain an image field. + +*** + +## Configuration + +The following configuration parameter is required: + +### Image field + +The field that contains the image. + +## Output \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper.md new file mode 100644 index 000000000..e2cd6b0e6 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper.md @@ -0,0 +1,43 @@ +--- +id: org.apache.streampipes.processor.imageclassification.jvm.image-cropper +title: Image Cropper +sidebar_label: Image Cropper +--- + + + +

+ +

+ +*** + +## Description + +Image Enrichment: Crops an + image based on + given bounding box coordinates + +*** + +## Required input +An image and an array with bounding boxes. +A box consists of the x and y coordinates in the image as well as the height and width + +## Output +A new event for each box containing the cropped image \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher.md new file mode 100644 index 000000000..8a09f3ae8 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher.md @@ -0,0 +1,43 @@ +--- +id: org.apache.streampipes.processor.imageclassification.jvm.image-enricher +title: Image Enricher +sidebar_label: Image Enricher +--- + + + + + +

+ +

+ +*** + +## Description + +Image Enrichment: Enriches an + image with + given bounding box coordinates + +## Required input + +An image and an array with bounding boxes, an array with scores and an array with labels. +A box consists of the x and y coordinates in the image as well as the height and width, and the class index with score. + +## Output +A new event containing the image with bounding boxes rendered according to the boxes of the input event. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.qrcode.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.qrcode.md new file mode 100644 index 000000000..74f12344f --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processor.imageclassification.qrcode.md @@ -0,0 +1,68 @@ +--- +id: org.apache.streampipes.processor.imageclassification.qrcode +title: QR Code Reader +sidebar_label: QR Code Reader +--- + + + + + +

+ +

+ +*** + +## Description + +QR Code Reader: Detects a QR Code in an image + +*** + +## Required input + +Input events must contain an image field. + +*** + +## Configuration + +### Image + +Image of the QR code + +### Send placeholder value if no QR code is detected + +A boolean option: if enabled, a placeholder value is sent whenever no QR code is detected. + +### Placeholder value + +The placeholder value to send. + +## Output + +Outputs an event similar to the one below. + +``` +{ + 'qrvalue': 'http://github.com/', + 'timestamp': 1621244783151 +} +``` \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.changedetection.jvm.welford.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.changedetection.jvm.welford.md new file mode 100644 index 000000000..10f95361f --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.changedetection.jvm.welford.md @@ -0,0 +1,72 @@ +--- +id: org.apache.streampipes.processors.changedetection.jvm.welford +title: Welford Change Detection +sidebar_label: Welford Change Detection +--- + + + + + + + +*** + +## Description + +Performs change detection on a single dimension of the incoming data stream. This implementation tracks the mean and the +standard deviation using Welford's algorithm, which is well suited for data streams. A change is detected if the +cumulative deviation from the mean exceeds a certain threshold. + +*** + +## Required input + +The Welford change detection processor requires a data stream that has at least one field containing a numerical value. + +*** + +## Configuration + +### Value to observe + +Specify the dimension of the data stream (e.g., the temperature) on which to perform change detection. + +### Parameter `k` + +`k` controls the sensitivity of the change detector. Its unit is standard deviations. For an observation `x_n`, the +Cusum value is `S_n = max(0, S_{n-1} - z-score(x_n) - k)`. Thus, the cusum score `S` remains positive +if `S_{n-1} - z-score(x_n) > k`, and it grows whenever `z-score(x_n) < -k`. A sketch of the computation is shown below. + +### Parameter `h` + +The alarm threshold in standard deviations. An alarm occurs if `S_n > h`. + +## Output + +This processor outputs the original data stream plus + +- `cumSumLow`: The cumulative sum value for negative changes +- `cumSumHigh`: The cumulative sum value for positive changes +- `changeDetectedLow`: Boolean indicating if a negative change was detected +- `changeDetectedHigh`: Boolean indicating if a positive change was detected \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.jseval.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.jseval.md new file mode 100644 index 000000000..3fe018306 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.jseval.md @@ -0,0 +1,55 @@ +--- +id: org.apache.streampipes.processors.enricher.jvm.jseval +title: JavaScript Eval +sidebar_label: JavaScript Eval +--- + + + + + +
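The Welford change detection described above combines an online mean/standard-deviation estimate with two-sided CUSUM scores. The following is a compact sketch of that computation under stated assumptions (e.g., that the scores reset after an alarm); it is not the processor's actual code.

```java
// Welford's online mean/std estimate combined with two-sided CUSUM scores
// using sensitivity k and alarm threshold h.
public class WelfordCusum {

  private long n = 0;
  private double mean = 0.0;
  private double m2 = 0.0;        // sum of squared deviations (Welford)
  private double cumSumLow = 0.0;
  private double cumSumHigh = 0.0;

  void observe(double x, double k, double h) {
    // Welford update of mean and variance
    n++;
    double delta = x - mean;
    mean += delta / n;
    m2 += delta * (x - mean);
    double std = n > 1 ? Math.sqrt(m2 / (n - 1)) : 0.0;
    double z = std > 0 ? (x - mean) / std : 0.0;

    // Two-sided CUSUM on the z-score; the formula in the docs corresponds
    // to the "low" (negative change) variant.
    cumSumHigh = Math.max(0, cumSumHigh + z - k);
    cumSumLow = Math.max(0, cumSumLow - z - k);
    boolean changeDetectedHigh = cumSumHigh > h;
    boolean changeDetectedLow = cumSumLow > h;
    if (changeDetectedHigh || changeDetectedLow) {
      System.out.printf("change detected at n=%d (low=%b, high=%b)%n",
          n, changeDetectedLow, changeDetectedHigh);
      cumSumLow = 0;
      cumSumHigh = 0; // reset after an alarm (a common, but assumed, choice)
    }
  }
}
```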

+ +

+ +*** + +## Description +A pipeline element that allows writing a user-defined JavaScript function to enrich events. + +*** + +## Required input +This processor does not have any specific input requirements. + +*** + +## Configuration +Users can specify custom enrichment logic within the `process` function. Please note that the `process` function +must have the following format and must return a map of data which is compatible with the output schema. +```javascript + function process(event) { + // do processing here. + // return a map with fields that match the defined output schema. + return {id: event.id, tempInCelsius: (event.tempInKelvin - 273.15)}; + } +``` + +## Output +A new event with the user-defined output schema. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.math.mathop.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.math.mathop.md new file mode 100644 index 000000000..859708971 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.math.mathop.md @@ -0,0 +1,56 @@ +--- +id: org.apache.streampipes.processors.enricher.jvm.processor.math.mathop +title: Math +sidebar_label: Math +--- + + + + + +

+ +

+ +*** + +## Description + +Performs calculations on event properties (+, -, *, /, %). + +*** + +## Required input +The math processor works with any event that has at least one field containing a numerical value. + +*** + +## Configuration + +### Left operand +The field from the input event that should be used as the left operand. + +### Right operand +The field from the input event that should be used as the right operand. + +### Operation +The math operation that should be performed. + +## Output +The processor appends the calculation result to each input event. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.math.staticmathop.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.math.staticmathop.md new file mode 100644 index 000000000..ce274a8ea --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.math.staticmathop.md @@ -0,0 +1,56 @@ +--- +id: org.apache.streampipes.processors.enricher.jvm.processor.math.staticmathop +title: Static Math +sidebar_label: Static Math +--- + + + + + +

+ +

+ +*** + +## Description + +Performs a calculation on an event property with a static value (+, -, *, /, %). + +*** + +## Required input +The math processor works with any event that has at least one field containing a numerical value. + +*** + +## Configuration + +### Left operand +The field from the input event that should be used as the left operand. + +### Right operand value +Specify the value of the right operand. + +### Operation +The math operation that should be performed. + +## Output +The processor appends the calculation result to each input event. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.trigonometry.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.trigonometry.md new file mode 100644 index 000000000..f5d55cca4 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.processor.trigonometry.md @@ -0,0 +1,56 @@ +--- +id: org.apache.streampipes.processors.enricher.jvm.processor.trigonometry +title: Trigonometry Functions +sidebar_label: Trigonometry Functions +--- + + + + + +

+ +

+ +*** + +## Description + +Performs trigonometric functions (sin, cos, tan) on event properties. + +*** + +## Required input +The trigonometry processor works with any event that has at least one field containing a numerical value. + +*** + +## Configuration + +The following configuration parameters are available: + +### Alpha +The field that should be used for calculating the trigonometric function. + + +### Operation +The trigonometric function that should be calculated. + +## Output +The processor appends the calculation result to each input event. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.valuechange.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.valuechange.md new file mode 100644 index 000000000..92718d815 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.enricher.jvm.valuechange.md @@ -0,0 +1,52 @@ +--- +id: org.apache.streampipes.processors.enricher.jvm.valuechange +title: Value Change +sidebar_label: Value Change +--- + + + + + +

+ +

+ +*** + +## Description + +Detects when a numeric property changes from one configured value to another. + +*** + +## Required input +The required input is a number. + +*** + +## Configuration +Value of the last event (example: 0) + +Value of the current event (example: 5) + + +## Output +A boolean value is returned when the input changes. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.compose.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.compose.md new file mode 100644 index 000000000..8cb669890 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.compose.md @@ -0,0 +1,50 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.compose +title: Compose +sidebar_label: Compose +--- + + + + + +

+ +

+ +*** + +## Description + +Merges two event streams. Any time a new input event arrives, it is merged with the last input event from the other +event stream and forwarded (see the sketch below). + +*** + +## Required input +The Compose processor does not have any specific input requirements. + +*** + +## Configuration + +(no further configuration required) + +## Output +The compose processor has a configurable output that can be selected by the user at pipeline modeling time. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.enrich.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.enrich.md new file mode 100644 index 000000000..5dfb0d96b --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.enrich.md @@ -0,0 +1,47 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.enrich +title: Merge Two Streams +sidebar_label: Merge Two Streams +--- + + + + + +
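The merge behavior described above — combine each arriving event with the last event seen on the other stream — can be sketched with a simple last-event cache. This is an illustrative assumption, not the processor's actual code.

```java
import java.util.HashMap;
import java.util.Map;

// Each arriving event is combined with the last event of the other stream
// and then forwarded.
public class ComposeSketch {

  private Map<String, Object> lastOfStreamA = null;
  private Map<String, Object> lastOfStreamB = null;

  Map<String, Object> onEvent(int streamIndex, Map<String, Object> event) {
    Map<String, Object> merged = new HashMap<>(event);
    if (streamIndex == 0) {
      lastOfStreamA = event;
      if (lastOfStreamB != null) merged.putAll(lastOfStreamB);
    } else {
      lastOfStreamB = event;
      if (lastOfStreamA != null) merged.putAll(lastOfStreamA);
    }
    return merged; // forwarded downstream
  }
}
```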

+ +

+ +*** + +## Description +Merges two data streams by enriching one of the streams with the properties of the other stream. The output frequency is the same as the frequency of the stream which is enriched. +*** + +## Required input +None +*** + +## Configuration + +* Select the stream which should be enriched with the properties of the other stream. + * The last event of the stream is held in state, and each event of the other stream is enriched with the properties selected by the user. + +## Output +The processor has a configurable output that can be selected by the user at pipeline modeling time. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.limit.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.limit.md new file mode 100644 index 000000000..087d72ff1 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.limit.md @@ -0,0 +1,70 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.limit +title: Rate Limit +sidebar_label: Rate Limit +--- + + + + + +

+ +

+ +*** + +## Description +This processor limits the number of events emitted based on a specified criterion such as time or number of events. + +*** + +## Required input +The processor works with any input event. + +*** + +## Configuration + +### Enable Grouping
Enabling this will use grouping with rate-limiting (note: if disabled, the `Grouping Field` property is ignored). + +### Grouping Field +Runtime field to be used as the grouping key. If grouping is disabled, this setting will be ignored. + +### Window Type +This specifies the type of window to be used (time / length / cron). + +### Length Window Size +Length window size in event count (note: only works with length window type). + +### Time Window Size +Time window size in milliseconds (note: only works with time window type). + +### Cron Window Expression +Cron expression [Link](https://www.freeformatter.com/cron-expression-generator-quartz.html) to trigger and emit events (e.g., `0 * * ? * *` for every minute) (note: only works with cron window type). + +### Output Event Selection +This specifies the event(s) that are selected to be emitted. +- First: emit first event of the window. +- Last: emit last event of the window. +- All: emit all events of the window. + +## Output +The processor outputs the events which satisfy the rate-limiting conditions. diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.merge.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.merge.md new file mode 100644 index 000000000..569b5ee2f --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.merge.md @@ -0,0 +1,57 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.merge +title: Synchronize Two Streams +sidebar_label: Synchronize Two Streams +--- + + + + + +

+ +

+ +*** + +## Description + +Merges two event streams by their timestamp. +Two events of the different streams are merged when they occur at the same time. + +The following figure shows how the events of the two data streams will be merged: + +

+ +

+ +*** + +## Required input +Each of the data streams needs a timestamp. + +*** + +## Configuration + +* For each stream, the timestamp property on which the merge is performed has to be selected. +* The Time Interval describes the maximum difference between two events' timestamps to decide whether they are a match. To be a valid match, the following condition must hold: `|timestamp_stream_1 - timestamp_stream_2| < interval` (see the sketch below). + +## Output +The processor has a configurable output that can be selected by the user at pipeline modeling time. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.movingaverage.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.movingaverage.md new file mode 100644 index 000000000..2b6cad6d5 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.movingaverage.md @@ -0,0 +1,46 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.movingaverage +title: Moving Average +sidebar_label: Moving Average +--- + + + + +*** + + +## Description + +Smooths the data stream by the mean/median of the last n values. + +*** + +## Required input +A numerical field is required. +*** + +## Configuration +### N Value +Specifies the number of previous data points which are used to smooth the data. +### Method +Specifies the method which is used to smooth the data. Choose between mean and median. + +## Output +Appends a field with the smoothed data. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.numericalfilter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.numericalfilter.md new file mode 100644 index 000000000..55c320801 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.numericalfilter.md @@ -0,0 +1,56 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.numericalfilter +title: Numerical Filter +sidebar_label: Numerical Filter +--- + + + + + +
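The matching condition for synchronizing two streams reduces to a single absolute-difference check. Here is a minimal sketch of that predicate, not the processor's actual code.

```java
// Two events are considered a match when their timestamps differ by
// less than the configured interval.
public class TimestampMatcher {

  static boolean isMatch(long timestampStream1, long timestampStream2, long intervalMs) {
    return Math.abs(timestampStream1 - timestampStream2) < intervalMs;
  }

  public static void main(String[] args) {
    System.out.println(isMatch(1715593295000L, 1715593295400L, 500)); // true
    System.out.println(isMatch(1715593295000L, 1715593296000L, 500)); // false
  }
}
```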

+ +

+ +*** + +## Description +The Numerical Filter processor filters numerical values based on a given threshold. + +*** + +## Required input +The processor works with any input event that has one field containing a numerical value. + +*** + +## Configuration + +### Field +Specifies the field to which the filter operation should be applied. + + +### Operation +Specifies the filter operation that should be applied to the field. + +### Threshold value +Specifies the threshold value. + +## Output +The processor outputs the input event if it satisfies the filter expression. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter.md new file mode 100644 index 000000000..90466f092 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter.md @@ -0,0 +1,67 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.numericaltextfilter +title: Numerical Text Filter +sidebar_label: Numerical Text Filter +--- + + + + + +

+ +

+ + +*** + +## Description +The Numerical Text Filter processor filters numerical values based on a given threshold and text values +based on a given string. It only forwards events if both criteria are satisfied. + +*** + +## Required input +The processor works with any input event that has one field containing a numerical value and one field +containing text. + +*** + +## Configuration + +### Number Field +Specifies the field to which the filter operation should be applied. + +### Number Operation +Specifies the filter operation that should be applied to the field. + +### Number Threshold +Specifies the threshold value. + +### Text Field +The field containing the text that should be filtered. + +### Text Operation +The operation used by the filter processor (equals or matches). + +### Text Keyword
Specifies the keyword to filter the text field. + +## Output +The processor outputs the input event if it satisfies the filter expression. diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.processor.booleanfilter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.processor.booleanfilter.md new file mode 100644 index 000000000..1f2d71468 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.processor.booleanfilter.md @@ -0,0 +1,52 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.processor.booleanfilter +title: Boolean Filter +sidebar_label: Boolean Filter +--- + + + + + +

+ +

+ +*** + +## Description +The Boolean Filter processor filters events based on a boolean field. + +*** + +## Required Input +The processor works with any input event that has one field containing a boolean value. + +*** + +## Configuration + +### Field +Specifies the field to which the filter operation should be applied. + +### Field Value +Events with the selected field value are forwarded. + +## Output +The processor outputs the input event if the field value equals the selected value. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.project.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.project.md new file mode 100644 index 000000000..69a0616bb --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.project.md @@ -0,0 +1,48 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.project +title: Projection +sidebar_label: Projection +--- + + + + + +

+ +

+ +*** + +## Description +Outputs a selectable subset of an input event type. + +*** + +## Required input +The project processor works with any input event stream. + +*** + +## Configuration + +(no further configuration required) + +## Output +The output depends on the fields selected at pipeline development time. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.schema.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.schema.md new file mode 100644 index 000000000..ebdfc66e0 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.schema.md @@ -0,0 +1,46 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.schema +title: Merge stream with same schema +sidebar_label: Merge stream with same schema +--- + + + + + + + +## Description + +Merges two events by their schema. +It checks whether the schemas of the two events are equal. +If the schemas are not equal, an SpRuntimeException is thrown; otherwise, the events are collected. + +*** + +## Required input +Two events are needed. +*** + +## Configuration + +For each stream, schema information should be present. + +## Output +Events with the same schema. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.sdt.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.sdt.md new file mode 100644 index 000000000..02de52173 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.sdt.md @@ -0,0 +1,85 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.sdt +title: Swinging Door Trending (SDT) Filter Processor +sidebar_label: Swinging Door Trending (SDT) Filter Processor +--- + + + + + +

+ +

+ + +*** + +## Description + +The **Swinging Door Trending (SDT)** algorithm is a linear trend compression algorithm. +In essence, it replaces a series of continuous `(timestamp, value)` points with a straight line determined by the start and end points. + +The **Swinging Door Trending (SDT) Filter Processor** can extract and forward the characteristic events of the original stream. +In general, this filter can also be used to reduce the frequency of original data in a lossy way. + +*** + +## Required Inputs + +The processor works with any input event that has **one field containing a timestamp** and +**one field containing a numerical value**. + +*** + +## Configuration + +### Timestamp Field +Specifies the timestamp field name where the SDT algorithm should be applied on. + +### Value Field +Specifies the value field name where the SDT algorithm should be applied on. + +### Compression Deviation +**Compression Deviation** is the most important parameter in SDT that represents the maximum difference +between the current sample and the current linear trend. + +**Compression Deviation** needs to be greater than 0 to perform compression. + +### Compression Minimum Time Interval +**Compression Minimum Time Interval** is a parameter that measures the time distance between two stored data points, +which is used for noise reduction. + +If the time interval between the current point and the last stored point is less than or equal to its value, +the current point will NOT be stored regardless of compression deviation. + +The default value is `0` with time unit ms. + +### Compression Maximum Time Interval +**Compression Maximum Time Interval** is a parameter that measures the time distance between two stored data points. + +If the time interval between the current point and the last stored point is greater than or equal to its value, +the current point will be stored regardless of compression deviation. A sketch of these retention rules is shown below. + +The default value is `9,223,372,036,854,775,807`(`Long.MAX_VALUE`) with time unit ms. + +*** + +## Output +The characteristic event stream forwarded by the SDT filter. diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.textfilter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.textfilter.md new file mode 100644 index 000000000..ce5c254b6 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.textfilter.md @@ -0,0 +1,53 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.textfilter +title: Text Filter +sidebar_label: Text Filter +--- + + + + + +
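The two time-interval parameters above override the compression check, which can be sketched as a simple point-retention rule. The following is a deliberately simplified sketch — the real swinging-door trend update is omitted and only hinted at by a plain deviation check.

```java
// Simplified point-retention rules of the SDT filter described above.
public class SdtFilterSketch {

  static boolean storePoint(long t, double value,
                            long lastStoredT, double lastStoredValue,
                            double compressionDeviation,
                            long minIntervalMs, long maxIntervalMs) {
    long dt = t - lastStoredT;
    if (dt <= minIntervalMs) {
      return false; // too close in time: never stored, regardless of deviation
    }
    if (dt >= maxIntervalMs) {
      return true;  // too far in time: always stored, regardless of deviation
    }
    // Placeholder for the swinging-door test: store the point once it deviates
    // from the current linear trend by more than the compression deviation.
    double deviation = Math.abs(value - lastStoredValue);
    return deviation > compressionDeviation;
  }
}
```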

+ +

+ +*** + +## Description +The Text Filter processor filters text values based on a given string. + +*** + +## Required input +The processor works with any input event that has one field containing text. + +*** + +## Configuration + +### Text Field +The field containing the text that should be filtered. + + +### Operation +The operation used by the filter processor (equals or matches). + +## Output +The processor outputs the input event if it satisfies the filter expression. \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.threshold.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.threshold.md new file mode 100644 index 000000000..6ffa67058 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.threshold.md @@ -0,0 +1,56 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.threshold +title: Threshold Detector +sidebar_label: Threshold Detector +--- + + + + + +

+ +

+ +*** + +## Description +The Threshold Detector processor appends a boolean indicating whether the condition is fulfilled. + +*** + +## Required input +The processor works with any input event that has one field containing a numerical value. + +*** + +## Configuration + +### Field +Specifies the field to which the threshold operation should be applied. + + +### Operation +Specifies the operation that should be applied to the field. + +### Threshold value +Specifies the threshold value. + +## Output +Appends a boolean indicating whether the condition is fulfilled. diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.throughputmon.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.throughputmon.md new file mode 100644 index 000000000..f97b5f24b --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.filters.jvm.throughputmon.md @@ -0,0 +1,56 @@ +--- +id: org.apache.streampipes.processors.filters.jvm.throughputmon +title: Throughput Monitor +sidebar_label: Throughput Monitor +--- + + + + + +

+ +

+ +*** + +## Description +The Throughput Monitoring processor computes throughput statistics. + +*** + +## Required Input +The processor works with any input event. + +*** + +## Configuration + +### Batch Window Size +Specifies the number of events that should be used for calculating throughput statistics. + + +## Output +The processor outputs a new event containing: +* The current timestamp (timestamp) +* The start time of the batch window (starttime) +* The end time of the batch window (endtime) +* The duration between both windows (duration) +* The number of events collected in the window (should be equal to batch size) +* The throughput in events per second diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.buffergeometry.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.buffergeometry.md new file mode 100644 index 000000000..a022374e7 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.buffergeometry.md @@ -0,0 +1,95 @@ +--- +id: org.apache.streampipes.processors.geo.jvm.jts.processor.buffergeometry +title: Geo Buffer Geometry +sidebar_label: Geo Buffer Geometry +--- + + + + + +

+ +

+ +*** + +## Description + +Creates a buffer polygon geometry from a geometry. +*** + +## Required inputs + +* JTS Geometry +* EPSG Code +* Distance +* Cap Style +* Join Style +* Mitre-Limit +* Side +* Simplify Factor +* Quadrant Segments +*** + +## Configuration + +### Geometry field +Input Geometry + +### EPSG field +Integer value representing EPSG code + +### Distance +The buffer distance around the geometry in meters + +### Cap Style +Defines the endcap style of the buffer. +* CAP_ROUND - the usual round end caps +* CAP_FLAT - end caps are truncated flat at the line ends +* CAP_SQUARE - end caps are squared off at the buffer distance beyond the line ends + +### Simplify Factor +The default simplify factor provides an accuracy of about 1%, which matches the accuracy of the +default Quadrant Segments parameter. + +### Quadrant Segments +The default number of facets into which to divide a fillet of 90 degrees. + +### Join Style +Defines the corners in a buffer: +* JOIN_ROUND - the usual round join +* JOIN_MITRE - corners are "sharp" (up to a distance limit) +* JOIN_BEVEL - corners are beveled (clipped off). + +### Mitre-Limit +Mitre ratio limit (only affects mitered join style) + +### Side +`left` or `right` performs a single-sided buffer on the geometry, with the buffered side +relative to the direction of the line or polygon. + +*** + +## Output +A polygon geometry with EPSG code. Shape is defined by input parameters. A sketch of how these options map onto the JTS buffer API is shown below. + + +### Example + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.bufferpoint.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.bufferpoint.md new file mode 100644 index 000000000..04346ce85 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.bufferpoint.md @@ -0,0 +1,82 @@ +--- +id: org.apache.streampipes.processors.geo.jvm.jts.processor.bufferpoint +title: Geo Buffer Point +sidebar_label: Geo Buffer Point +--- + + + + + +
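The cap style, join style, mitre limit, quadrant segments, simplify factor, and side options above correspond closely to the JTS buffer API. The following sketch assumes the `org.locationtech.jts` library and is not the processor's actual code; note that plain JTS buffering works in the units of the geometry's CRS, so the processor's meter-based distance implies an appropriate projection.

```java
import org.locationtech.jts.geom.Geometry;
import org.locationtech.jts.io.WKTReader;
import org.locationtech.jts.operation.buffer.BufferOp;
import org.locationtech.jts.operation.buffer.BufferParameters;

public class BufferGeometryExample {

  public static void main(String[] args) throws Exception {
    Geometry geom = new WKTReader().read("LINESTRING (0 0, 10 0, 10 10)");

    BufferParameters params = new BufferParameters();
    params.setEndCapStyle(BufferParameters.CAP_ROUND);   // CAP_ROUND / CAP_FLAT / CAP_SQUARE
    params.setJoinStyle(BufferParameters.JOIN_MITRE);    // JOIN_ROUND / JOIN_MITRE / JOIN_BEVEL
    params.setMitreLimit(5.0);                           // only affects mitered joins
    params.setQuadrantSegments(8);                       // facets per 90-degree fillet
    params.setSimplifyFactor(0.01);                      // default ~1% accuracy
    params.setSingleSided(true);                         // buffer only one side of the line

    Geometry buffer = BufferOp.bufferOp(geom, 2.0, params);
    System.out.println(buffer); // a polygon geometry
  }
}
```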

+ +

+ +*** + +## Description + +Creates a buffer polygon geometry from a point geometry. +*** + +## Required inputs + +* JTS Geometry +* EPSG Code +* Distance +* Cap Style +* Simplify Factor +* Quadrant Segments +*** + +## Configuration + +### Geometry Field +Input Point Geometry + +### EPSG field +Integer value representing EPSG code + +### Distance +The buffer distance around the geometry in meters + +### Cap Style +Defines the endcap style of the buffer: +CAP_ROUND - the usual round end caps +CAP_SQUARE - end caps are squared off at the buffer distance beyond the line ends + + +### Simplify Factor +The default simplify factor provides an accuracy of about 1%, which matches the accuracy of the +default Quadrant Segments parameter. + +### Quadrant Segments +The default number of facets into which to divide a fillet of 90 degrees. + +*** + +## Output +A polygon geometry with EPSG code. Shape is defined by input parameters. + +

+ +

+ +### Example + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.epsg.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.epsg.md new file mode 100644 index 000000000..294124c81 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.epsg.md @@ -0,0 +1,64 @@ +--- +id: org.apache.streampipes.processors.geo.jvm.jts.processor.epsg +title: Geo EPSG Code +sidebar_label: Geo EPSG Code +--- + + + + + +

+ +

+ + +*** + +## Description + +This processor adds an integer value to the event. This integer value represents +an EPSG code as a Spatial Reference System Identifier +(SRID). + + +*** + +## Required inputs + +None + +*** + +## Configuration + +An integer value representing a spatial reference system +(SRID). +Other possible values can be looked up via +spatialreference.org. + +### Parameter + +A 4- to 5-digit integer key. The default value is 4326, representing the World Geodetic System +(WGS84). + +*** +## Output + +Adds the EPSG number to the event. diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latlngtojtspoint.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latlngtojtspoint.md new file mode 100644 index 000000000..acc01928f --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latlngtojtspoint.md @@ -0,0 +1,73 @@ +--- +id: org.apache.streampipes.processors.geo.jvm.jts.processor.latlngtojtspoint +title: Geo Create JTS Point +sidebar_label: Geo Create JTS Point +--- + + + + + +

+ +

+ +*** + +## Description + +This processor creates a JTS Point geometry from latitude and longitude values. + +*** + +## Required inputs + +* Ontology Vocabulary Latitude +* Ontology Vocabulary Longitude +* Integer value representing EPSG Code + + +*** + +## Configuration + +Creates a JTS Geometry Point from Longitude (x) and Latitude (y) values in the coordinate reference system represented by the EPSG code. +An empty point geometry is created if the latitude or longitude value is missing in the event (e.g., a null value) or values are out of range. Allowed values for longitude are between -180.00 and 180.00; latitude values between -90.00 and 90.00. + +### 1st parameter +Latitude value + +### 2nd parameter +Longitude value + +### 3rd parameter +EPSG code value + +*** + +## Output + +Adds a point geometry in the Well Known Text notation and in Longitude (x) Latitude (y) axis order to the stream. + +### Example +* Input stream:
+ `{latitude=48.5622, longitude=-76.3501, EPSG=4326}` + +* Output Stream
+ `{latitude=48.5622, longitude=-76.3501, EPSG=4326, geom_wkt=POINT (-76.3501 48.5622)}` diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.reprojection.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.reprojection.md new file mode 100644 index 000000000..99ea9b9c1 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.reprojection.md @@ -0,0 +1,68 @@ +--- +id: org.apache.streampipes.processors.geo.jvm.jts.processor.reprojection +title: Geo CRS Reprojection +sidebar_label: Geo CRS Reprojection +--- + + + + + +
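The point construction described above — validate the coordinates, then emit a WKT point in longitude (x), latitude (y) axis order or an empty point — can be sketched as follows. This is an assumption-based illustration, not the processor's actual code.

```java
public class LatLngToWktPoint {

  // Produces a WKT point, or an empty point if a coordinate is missing
  // or out of range (longitude: [-180, 180], latitude: [-90, 90]).
  static String toWkt(Double latitude, Double longitude) {
    boolean valid = latitude != null && longitude != null
        && longitude >= -180.0 && longitude <= 180.0
        && latitude >= -90.0 && latitude <= 90.0;
    return valid ? "POINT (" + longitude + " " + latitude + ")" : "POINT EMPTY";
  }

  public static void main(String[] args) {
    System.out.println(toWkt(48.5622, -76.3501)); // POINT (-76.3501 48.5622)
    System.out.println(toWkt(null, -76.3501));    // POINT EMPTY
  }
}
```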

+ +

+ +*** + +## Description + +Changes the CRS of a geometry by reprojection. + +*** + +## Required input + +* WKT String of a JTS Point Geometry +* Integer value representing Source EPSG code +* Integer value representing Target EPSG code + + +*** + +## Configuration + +The target EPSG code is entered manually; the WKT geometry will be reprojected accordingly. + +### 1st parameter +Geometry WKT String + +### 2nd parameter +Source EPSG code + +### 3rd parameter +Target EPSG code + +*** + +## Output + +Updates the event with the new EPSG code and the WKT literal reprojected to the target EPSG. + +### Example + diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory.md new file mode 100644 index 000000000..ee602fee6 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory.md @@ -0,0 +1,83 @@ +--- +id: org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory +title: Geo Single Trajectory Creator +sidebar_label: Geo Single Trajectory Creator +--- + + + + + +

+ +

+ +*** + +## Description + +This processor creates a JTS LineString geometry from JTS Point events, representing a trajectory. A trajectory is defined as the path that a moving object follows through space as a function of time. Each sub-point of this LineString represents a single event. The latest sub-point represents the latest geo-event. For each Point event it is also possible to store an additional m-value representing, for example, the current speed, distance, duration, or direction of this event. A trajectory consists of at least two sub-points and can't be infinite, so a threshold of maximum allowed sub-points is required. When the sub-point threshold is exceeded, the oldest point is removed from the LineString (see the sketch below). +*** + +## Required inputs + +* WKT String of a JTS Point Geometry +* Integer value representing EPSG code +* Number value for M-value + + +*** + +## Configuration + +Creates a JTS LineString geometry from JTS Point geometry events representing a trajectory. + + +### 1st parameter +Point WKT String + +### 2nd parameter +EPSG code value + +### 3rd parameter +M-value for each sub-point of the trajectory + +### 4th parameter +String for a description text for the trajectory + +### 5th parameter +Number of allowed sub-points + +*** + +## Output + +Adds a LineString geometry in the Well Known Text to the event, representing a trajectory. Also, the description text is added to the event stream. The first existing event creates an empty LineString. + +### Example +Creating a LineString with a threshold of 2 allowed sub-points: + +* First Event: + * Point(8.12 41.23) --> LineString(empty) +* Second Event: + * Point(8.56 41.25) --> LineString(8.12 41.23, 8.56 41.25) +* Third Event: + * Point(8.84 40.98) --> LineString(8.56 41.25, 8.84 40.98) + +M-value is not represented in the LineString but will be stored for internal use! diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.complex.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.complex.md new file mode 100644 index 000000000..9cd2db4b2 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.complex.md @@ -0,0 +1,86 @@ +--- +id: org.apache.streampipes.processors.geo.jvm.jts.processor.validation.complex +title: Geo Geometry Topology Validation Filter +sidebar_label: Geo Geometry Topology Validation Filter +--- + + + + + +
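The trajectory behavior above — keep sub-points up to a threshold, drop the oldest once exceeded, and require at least two sub-points — can be sketched with a simple deque. The class and record names are hypothetical; this is not the processor's actual code.

```java
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.stream.Collectors;

public class TrajectorySketch {

  record SubPoint(double x, double y, double m) { } // m-value stored internally only

  private final Deque<SubPoint> points = new ArrayDeque<>();
  private final int maxSubPoints;

  TrajectorySketch(int maxSubPoints) {
    this.maxSubPoints = maxSubPoints;
  }

  String addPoint(double x, double y, double m) {
    points.addLast(new SubPoint(x, y, m));
    if (points.size() > maxSubPoints) {
      points.removeFirst(); // drop the oldest sub-point
    }
    if (points.size() < 2) {
      return "LINESTRING EMPTY"; // a trajectory needs at least two sub-points
    }
    return points.stream()
        .map(p -> p.x() + " " + p.y())
        .collect(Collectors.joining(", ", "LINESTRING (", ")"));
  }

  public static void main(String[] args) {
    TrajectorySketch t = new TrajectorySketch(2);
    System.out.println(t.addPoint(8.12, 41.23, 0)); // LINESTRING EMPTY
    System.out.println(t.addPoint(8.56, 41.25, 0)); // LINESTRING (8.12 41.23, 8.56 41.25)
    System.out.println(t.addPoint(8.84, 40.98, 0)); // LINESTRING (8.56 41.25, 8.84 40.98)
  }
}
```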

+ +

+ +*** + +## Description +Validates the topology of a geometry against the topology +errors defined by JTS: + +* **HOLE_OUTSIDE_SHELL**: Indicates that a hole of a polygon lies partially or completely in the exterior of the shell +* **NESTED_HOLES**: Indicates that a hole lies in the interior of another hole in the same polygon +* **DISCONNECTED_INTERIOR**: Indicates that the interior of a polygon is disjoint (often caused by set of contiguous holes splitting the polygon into two parts) +* **SELF_INTERSECTION**: Indicates that two rings of a polygonal geometry intersect +* **RING_SELF_INTERSECTION**: Indicates that a ring self-intersects +* **NESTED_SHELLS**: Indicates that a polygon component of a MultiPolygon lies inside another polygonal component +* **DUPLICATE_RINGS**: Indicates that a polygonal geometry contains two rings which are identical +* **TOO_FEW_POINTS**: Indicates that either a LineString contains a single point or a LinearRing contains 2 or 3 points +* **RING_NOT_CLOSED**: Indicates that a ring is not correctly closed (the first and the last coordinate are different) + + +*** + +## Required inputs + +* JTS Geometry +* EPSG Code +* Validation Type +* Log Output Option + + +*** + +## Configuration + +### Point Geometry Field +Input Point Geometry + +### EPSG field +Integer value representing EPSG code + +### Validation Output +Choose the output result of the filter. +* Valid - all valid events are passed through +* Invalid - all invalid events are passed through + + +### Log Output Option +Option to activate log output to the pipeline logger window with a detailed reason why the geometry is invalid + + +*** + +### Default Validation Checks + +## Output + +All events that match the validation output. + +### Example diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.simple.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.simple.md new file mode 100644 index 000000000..21c77b1b1 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.simple.md @@ -0,0 +1,80 @@ +--- +id: org.apache.streampipes.processors.geo.jvm.jts.processor.validation.simple +title: Geo Geometry Validation Filter +sidebar_label: Geo Geometry Validation Filter +--- + + + + + +

+ +

+
+***
+
+## Description
+
+Checks whether the geometry of an event is simple and/or empty.
+
+***
+
+## Required inputs
+
+* JTS Geometry
+* EPSG Code
+* Validation Type
+* Validation Output
+
+
+***
+
+## Configuration
+
+Validates the geometry against different validation categories.
+
+
+### Point Geometry Field
+The input point geometry.
+
+### EPSG field
+Integer value representing the EPSG code.
+
+### Validation Type
+* IsEmpty - Geometry is empty.
+* IsSimple - Geometry is simple. The SFS definition of simplicity follows the general rule that a geometry is simple if it has no points of self-tangency, self-intersection or other anomalous points.
+  * Valid polygon geometries are simple, since their rings must not self-intersect.
+  * Linear rings have the same semantics.
+  * Linear geometries are simple if they do not self-intersect at points other than boundary points.
+  * Zero-dimensional geometries (points) are simple if they have no repeated points.
+  * Empty geometries are always simple!
+
+### Validation Output
+Choose the output result of the filter.
+* Valid - all valid events are passed through
+* Invalid - all invalid events are passed through
+
+***
+
+## Output
+
+All events that match the validation output.
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversine.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversine.md
new file mode 100644
index 000000000..075dd23d0
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversine.md
@@ -0,0 +1,61 @@
+---
+id: org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversine
+title: Geo Distance Calculator (Haversine)
+sidebar_label: Geo Distance Calculator (Haversine)
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+Calculates the distance between two latitude/longitude pairs in a single event with the Haversine formula.
+
+***
+
+## Required inputs
+Requires two positions on the Earth's surface, each specified by two geographic coordinates: the longitude and latitude of the point.
+
+***
+
+## Configuration
+
+### First Longitude
+The geographic coordinate that specifies the east-west position of the first point on the Earth's surface.
+
+### First Latitude
+The geographic coordinate that specifies the north-south position of the first point on the Earth's surface.
+
+### Second Longitude
+The geographic coordinate that specifies the east-west position of the second point on the Earth's surface.
+
+### Second Latitude
+The geographic coordinate that specifies the north-south position of the second point on the Earth's surface.
+
+## Output
+
+Outputs an event similar to the one below (a sketch of the underlying formula follows below):
+
+```
+{
+ 'distance': 12.2
+}
+```
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversinestatic.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversinestatic.md
new file mode 100644
index 000000000..0cd8b8c33
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversinestatic.md
@@ -0,0 +1,74 @@
+---
+id: org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversinestatic
+title: Geo Distance Calculator Static (Haversine)
+sidebar_label: Geo Distance Calculator Static (Haversine)
+---
+
+
+
+
+
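+The Haversine formula used by the distance calculators above, as a self-contained Python sketch (the mean Earth radius constant and the example coordinates are approximate):
+
+```python
+import math
+
+def haversine_km(lat1, lon1, lat2, lon2):
+    """Great-circle distance between two lat/lon pairs in kilometers."""
+    r = 6371.0  # mean Earth radius in km
+    p1, p2 = math.radians(lat1), math.radians(lat2)
+    dphi = math.radians(lat2 - lat1)
+    dlmb = math.radians(lon2 - lon1)
+    a = math.sin(dphi / 2) ** 2 + math.cos(p1) * math.cos(p2) * math.sin(dlmb / 2) ** 2
+    return 2 * r * math.asin(math.sqrt(a))
+
+# Karlsruhe -> Stuttgart, roughly 60-65 km
+print(round(haversine_km(49.0069, 8.4037, 48.7758, 9.1829), 1))
+```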

+ +

+
+***
+
+## Description
+
+Calculates the distance with the Haversine formula between a fixed location (e.g., a place) and a latitude/longitude pair of an input event.
+
+***
+
+## Required inputs
+
+Requires a data stream that provides latitude and longitude values.
+
+***
+
+## Configuration
+
+### Latitude field
+
+The field containing the latitude value.
+
+### Longitude field
+
+The field containing the longitude value.
+
+### Latitude
+
+The latitude value of the fixed location.
+
+### Longitude
+
+The longitude value of the fixed location.
+
+## Output
+
+Outputs an event similar to the one below.
+
+```
+{
+ 'distance': 12.5
+}
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemaps.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemaps.md
new file mode 100644
index 000000000..ce310f573
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemaps.md
@@ -0,0 +1,61 @@
+---
+id: org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemaps
+title: Geo Google Maps Geocoder
+sidebar_label: Geo Google Maps Geocoder
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor computes the latitude and longitude values from a location (a place name such as "Karlsruhe, Germany") and adds the result to the event.
+
+***
+
+## Required inputs
+
+The input event requires a field which contains the name of a place.
+
+***
+
+## Configuration
+
+### Place
+
+The field of the input event that should be used to compute the lat/lng values.
+
+## Output
+
+Outputs an event similar to the one below.
+
+```
+{
+ 'latitude': 6.927079,
+ 'longitude': 79.861244
+}
+```
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemapsstatic.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemapsstatic.md
new file mode 100644
index 000000000..6f5a443df
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemapsstatic.md
@@ -0,0 +1,62 @@
+---
+id: org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemapsstatic
+title: Geo Google Maps Static Geocoder
+sidebar_label: Geo Google Maps Static Geocoder
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor computes the latitude and longitude values from a fixed location (a place name such as "Karlsruhe, Germany") and adds the result to the event.
+
+***
+
+## Required inputs
+
+The input event requires a field which contains the name of a place.
+
+***
+
+## Configuration
+
+### Place
+
+The place name that should be converted to a lat/lng combination.
+
+## Output
+
+Outputs an event similar to the one below.
+
+```
+{
+ 'latitude': 6.927079,
+ 'longitude': 79.861244
+}
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.revgeocoder.geocityname.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.revgeocoder.geocityname.md
new file mode 100644
index 000000000..8375bb7e9
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.revgeocoder.geocityname.md
@@ -0,0 +1,67 @@
+---
+id: org.apache.streampipes.processors.geo.jvm.latlong.processor.revgeocoder.geocityname
+title: Geo City Name Reverse Decoder
+sidebar_label: Geo City Name Reverse Decoder
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor computes the city name based on given lat/lng coordinates that are transmitted as fields of an event.
+It automatically downloads the file cities1000.zip from Geonames (this file is provided under the CC BY 4.0 license).
+
+
+
+***
+
+## Required inputs
+
+The input event requires latitude and longitude values.
+
+***
+
+## Configuration
+
+### Latitude
+
+The field containing the latitude value.
+
+### Longitude
+
+The field containing the longitude value.
+
+## Output
+
+Outputs an event similar to the one below.
+
+```
+{
+ 'geoname': 'Colombo'
+}
+```
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.speedcalculator.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.speedcalculator.md
new file mode 100644
index 000000000..543f26cdc
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.speedcalculator.md
@@ -0,0 +1,59 @@
+---
+id: org.apache.streampipes.processors.geo.jvm.latlong.processor.speedcalculator
+title: Geo Speed Calculator
+sidebar_label: Geo Speed Calculator
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Calculates the speed (in km/h) based on latitude/longitude values in a data stream. To do so, it uses the GPS coordinates and timestamps of consecutive events.
+It calculates the distance between two points (events) and how much time has passed. Based on those values the speed is calculated.
+
+***
+
+## Required inputs
+
+Requires a data stream that provides latitude and longitude values as well as a timestamp.
+
+***
+
+## Configuration
+
+### Timestamp field
+
+### Latitude field
+
+### Longitude field
+
+### Count window
+Describes the number of stored events used for the calculation.
+E.g., a value of 5 means that the current event and the event (t-5) are used for the speed calculation.
+
+## Output
+Appends the calculated speed in km/h.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.count.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.count.md
new file mode 100644
index 000000000..c2f593fbb
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.count.md
@@ -0,0 +1,66 @@
+---
+id: org.apache.streampipes.processors.siddhi.count
+title: Count Value Occurrence
+sidebar_label: Count Value Occurrence
+---
+
+
+
+
+Performs a count aggregation with the Siddhi CEP engine.
+
+***
+
+## Description
+
+Performs an aggregation based on a given field and outputs the number of occurrences.
+Example: Count the number of vehicle positions per vehicleId.
+The count aggregation requires a time window over which the aggregation is performed and a field whose values are counted.
+
+***
+
+## Required input
+
+Does not have any specific input requirements.
+
+***
+
+## Configuration
+
+### FieldToCount
+Specifies the field containing the values that should be counted.
+
+### TimeWindowSize
+Specifies the size of the time window and consequently the number of values that are aggregated each time.
+
+### Time Window Scale
+Specifies the scale/unit of the time window. There are three different time scales to choose from: seconds, minutes or hours.
+
+## Output
+The output event is composed of two fields. The field "value" contains the counted value,
+and the field "count" the number of occurrences.
+Example (see also the sketch below):
+```
+{
+ 'value': 'vehicleId',
+ 'count': 12
+}
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.increase.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.increase.md
new file mode 100644
index 000000000..200a7f8da
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.increase.md
@@ -0,0 +1,65 @@
+---
+id: org.apache.streampipes.processors.siddhi.increase
+title: Trend
+sidebar_label: Trend
+---
+
+
+
+
+
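+To illustrate the count aggregation above, here is a plain-Python sliding-window counter (the Siddhi engine handles this internally; the class and field names are illustrative):
+
+```python
+from collections import Counter, deque
+import time
+
+class WindowedCounter:
+    """Counts value occurrences within a sliding time window (seconds)."""
+
+    def __init__(self, window_seconds: float):
+        self.window = window_seconds
+        self.events = deque()  # (timestamp, value)
+
+    def add(self, value, now=None):
+        now = time.time() if now is None else now
+        self.events.append((now, value))
+        while self.events and now - self.events[0][0] > self.window:
+            self.events.popleft()  # expire events outside the window
+        counts = Counter(v for _, v in self.events)
+        return {"value": value, "count": counts[value]}
+
+wc = WindowedCounter(window_seconds=60)
+print(wc.add("vehicle1", now=0))   # {'value': 'vehicle1', 'count': 1}
+print(wc.add("vehicle1", now=10))  # {'value': 'vehicle1', 'count': 2}
+print(wc.add("vehicle1", now=90))  # first two events expired -> count is 1 again
+```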

+ +

+
+***
+
+## Description
+
+Detects the increase of a numerical field over a customizable time window. Example: A temperature value increases by 10 percent within 5 minutes.
+
+***
+
+## Required input
+
+The event requires a numerical field whose trend should be observed.
+
+***
+
+## Configuration
+
+### Value to Observe
+
+Specifies the value field that should be monitored.
+
+### Increase/Decrease
+
+Specifies the type of operation the processor should perform.
+
+### Percentage of Increase/Decrease
+
+Specifies the increase in percent (e.g., 100 indicates an increase by 100 percent within the specified time window).
+
+### Time Window Length (Seconds)
+
+Specifies the size of the time window in seconds.
+
+## Output
+
+Outputs the event if a trend matching the configuration is observed (see the sketch below).
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.listcollector.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.listcollector.md
new file mode 100644
index 000000000..f789c1cf0
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.listcollector.md
@@ -0,0 +1,51 @@
+---
+id: org.apache.streampipes.processors.siddhi.listcollector
+title: List Collector
+sidebar_label: List Collector
+---
+
+
+
+
+***
+
+## Description
+
+Collects all values from a field within a specified batch window into a list.
+
+***
+
+## Required input
+
+Does not have any specific input requirements.
+
+***
+
+## Configuration
+
+### Field
+
+The field where values should be collected into a list.
+
+### Batch Window Size
+
+The batch window size.
+
+## Output
+
+Outputs the values collected within the batch window as a list field.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.listfilter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.listfilter.md
new file mode 100644
index 000000000..2526408c8
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.listfilter.md
@@ -0,0 +1,53 @@
+---
+id: org.apache.streampipes.processors.siddhi.listfilter
+title: List Filter
+sidebar_label: List Filter
+---
+
+
+
+
+
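+A plain-Python sketch of the trend detection idea described above (the Siddhi-based implementation differs; this only illustrates a percentage increase within a time window, and the function and variable names are illustrative):
+
+```python
+from collections import deque
+
+def detect_increase(history, now, value, window_s, pct):
+    """Returns True if value is at least pct percent above the lowest
+    value seen within the last window_s seconds."""
+    history.append((now, value))
+    while history and now - history[0][0] > window_s:
+        history.popleft()  # drop events outside the time window
+    lowest = min(v for _, v in history)
+    return lowest > 0 and (value - lowest) / lowest * 100 >= pct
+
+h = deque()
+print(detect_increase(h, 0, 20.0, window_s=300, pct=10))   # False (no history yet)
+print(detect_increase(h, 60, 23.0, window_s=300, pct=10))  # True: +15% within 5 minutes
+```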

+ +

+
+***
+
+## Description
+
+Filters the values of a list field using the Siddhi CEP engine.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.numericalfilter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.numericalfilter.md
new file mode 100644
index 000000000..fb20b1013
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.numericalfilter.md
@@ -0,0 +1,63 @@
+---
+id: org.apache.streampipes.processors.siddhi.numericalfilter
+title: Numerical Filter (Siddhi)
+sidebar_label: Numerical Filter (Siddhi)
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+The Numerical Filter processor filters numerical values based on a given threshold. To do so, it uses the lightweight
+CEP engine Siddhi and issues a Siddhi query, e.g.:
+
+```
+// filter query to filter out all events not satisfying the condition
+from inputStreamName[numberField<10]
+select *
+```
+
+***
+
+## Required input
+The processor works with any input event that has one field containing a numerical value.
+
+***
+
+## Configuration
+
+### Field
+Specifies the field to which the filter operation should be applied.
+
+
+### Operation
+Specifies the filter operation that should be applied to the field.
+
+### Threshold value
+Specifies the threshold value.
+
+## Output
+The processor outputs the input event if it satisfies the filter expression.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.topk.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.topk.md
new file mode 100644
index 000000000..beb0d8851
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.siddhi.topk.md
@@ -0,0 +1,53 @@
+---
+id: org.apache.streampipes.processors.siddhi.topk
+title: Top k
+sidebar_label: Top k
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Computes the most frequently occurring values (top k) of a field using the Siddhi CEP engine.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.chunker.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.chunker.md
new file mode 100644
index 000000000..3366fd431
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.chunker.md
@@ -0,0 +1,69 @@
+---
+id: org.apache.streampipes.processors.textmining.jvm.chunker
+title: Chunker (English)
+sidebar_label: Chunker (English)
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Segments given tokens into chunks (e.g. noun groups, verb groups, ...) and appends the found chunks to the stream.
+
+***
+
+## Required input
+
+Needs a stream with two string list properties:
+1. A list of tokens
+2. A list of part-of-speech tags (the Part-of-Speech processing element can be used for that)
+
+***
+
+## Configuration
+
+Assign the tokens and the part-of-speech tags to the corresponding stream property.
+To use this component you have to download or train an OpenNLP model:
+https://opennlp.apache.org/models.html
+
+## Output
+
+**Example** (the sketch below shows how chunks can be grouped from the chunker's tags):
+
+Input:
+```
+tokens: ["John", "is", "a", "Person"]
+tags: ["NNP", "VBZ", "DT", "NN"]
+```
+
+Output:
+```
+tokens: ["John", "is", "a", "Person"]
+tags: ["NNP", "VBZ", "DT", "NN"]
+chunks: ["John", "is", "a Person"]
+chunkType: ["NP", "VP", "NP"]
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.namefinder.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.namefinder.md
new file mode 100644
index 000000000..a4c070534
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.namefinder.md
@@ -0,0 +1,65 @@
+---
+id: org.apache.streampipes.processors.textmining.jvm.namefinder
+title: Name Finder
+sidebar_label: Name Finder
+---
+
+
+
+
+
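+A minimal sketch of how chunks like the ones above can be reconstructed, assuming B-/I- style chunk tags as produced by the OpenNLP chunker (the tag values in the example are illustrative):
+
+```python
+def group_chunks(tokens, chunk_tags):
+    """Groups tokens into chunks from B-/I- style chunk tags."""
+    chunks, types = [], []
+    for token, tag in zip(tokens, chunk_tags):
+        if tag.startswith("B-"):               # a new chunk begins
+            chunks.append(token)
+            types.append(tag[2:])
+        elif tag.startswith("I-") and chunks:  # continue the current chunk
+            chunks[-1] += " " + token
+    return chunks, types
+
+tokens = ["John", "is", "a", "Person"]
+tags = ["B-NP", "B-VP", "B-NP", "I-NP"]
+print(group_chunks(tokens, tags))  # (['John', 'is', 'a Person'], ['NP', 'VP', 'NP'])
+```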

+ +

+
+***
+
+## Description
+
+Loads a trained model which finds names, such as locations or organizations.
+
+A list of trained models can be found here: http://opennlp.sourceforge.net/models-1.5/.\
+A guide on how to train a new model can be found here: https://opennlp.apache.org/docs/1.9.1/manual/opennlp.html#tools.namefind.training.
+
+***
+
+## Required input
+
+A stream with a list of tokens from a text.
+
+***
+
+## Configuration
+
+Configure the Name Finder so that the tokens are assigned to the "List of Tokens" property.
+
+
+#### Model parameter
+
+The trained model which should be used to find the names.
+
+## Output
+
+Appends a string list property to the stream which contains all found names.
+
+**Example (with a loaded English person-name model):**
+
+Input: `(tokens: ["Hi", "John", "Doe", "is", "here"])`
+
+Output: `(tokens: ["Hi", "John", "Doe", "is", "here"], foundNames: ["John Doe"])`
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.partofspeech.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.partofspeech.md
new file mode 100644
index 000000000..13b301128
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.partofspeech.md
@@ -0,0 +1,62 @@
+---
+id: org.apache.streampipes.processors.textmining.jvm.partofspeech
+title: Part of Speech (English)
+sidebar_label: Part of Speech (English)
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Takes in a stream of tokens and marks each token with a part-of-speech tag.
+The list of used part-of-speech tags can be found [here](https://www.ling.upenn.edu/courses/Fall_2003/ling001/penn_treebank_pos.html).
+
+***
+
+## Required input
+
+A stream with a list property which contains the tokens.
+
+***
+
+## Configuration
+
+Simply assign the correct output of the previous stream to the part-of-speech detector input.
+To use this component you have to download or train an OpenNLP model:
+https://opennlp.apache.org/models.html
+
+## Output
+
+Appends two list properties to the stream:
+1. String list: The tag for each token
+2. Double list: The confidence for each tag that it is indeed the given tag (between 0 and 1)
+
+**Example:**
+
+Input: `(tokens: ["Hi", "Joe"])`
+
+Output: `(tokens: ["Hi", "Joe"], tags: ["UH", "NNP"], confidence: [0.82, 0.87])`
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection.md
new file mode 100644
index 000000000..b57ade01a
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection.md
@@ -0,0 +1,59 @@
+---
+id: org.apache.streampipes.processors.textmining.jvm.sentencedetection
+title: Sentence Detection (English)
+sidebar_label: Sentence Detection (English)
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Detects sentences in a text and splits the text accordingly. Only works with English sentences.
+
+***
+
+## Required input
+
+A stream with a string property which contains a text.
+
+***
+
+## Configuration
+
+Simply assign the correct output of the previous stream to the sentence detector input.
+To use this component you have to download or train an OpenNLP model:
+https://opennlp.apache.org/models.html
+
+## Output
+
+Creates a new event for each sentence in the text, replacing the original text with the single sentence.
+
+**Example:**
+
+Input: `(text: "Hi, how are you? I am fine!")`
+
+Output: `(text: "Hi, how are you?")`, `(text: "I am fine!")`
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.tokenizer.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.tokenizer.md
new file mode 100644
index 000000000..afa55c633
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.textmining.jvm.tokenizer.md
@@ -0,0 +1,59 @@
+---
+id: org.apache.streampipes.processors.textmining.jvm.tokenizer
+title: Tokenizer (English)
+sidebar_label: Tokenizer (English)
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Segments a given text into tokens (usually words, numbers, punctuation, ...). Works best with English text.
+
+***
+
+## Required input
+
+A stream with a string property which contains a text.
+
+***
+
+## Configuration
+
+Simply assign the correct output of the previous stream to the tokenizer input.
+To use this component you have to download or train an OpenNLP model:
+https://opennlp.apache.org/models.html
+
+## Output
+
+Adds a list to the stream which contains all tokens of the corresponding text.
+
+**Example:**
+
+Input: `(text: "Hi, how are you?")`
+
+Output: `(text: "Hi, how are you?", tokens: ["Hi", ",", "how", "are", "you", "?"])`
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter.md
new file mode 100644
index 000000000..99649168b
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter.md
@@ -0,0 +1,66 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.booloperator.counter
+title: Boolean Counter
+sidebar_label: Boolean Counter
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor monitors a boolean value and counts how often the value of the boolean changes.
+A user can configure whether the changes from FALSE to TRUE, TRUE to FALSE, or BOTH changes should be counted.
+
+***
+
+## Required input
+
+A boolean value is required in the data stream and can be selected with the field mapping.
+
+### Boolean Field
+
+The boolean value to be monitored.
+
+***
+
+## Configuration
+
+A user can configure whether the changes from TRUE to FALSE, FALSE to TRUE, or all changes of the
+boolean value should be counted.
+
+### Flank parameter
+
+Either:
+* TRUE -> FALSE: Increase counter on a true followed by a false
+* FALSE -> TRUE: Increase counter on a false followed by a true
+* BOTH: Increase counter on each change of the boolean value on two consecutive events
+
+## Output
+
+Adds an additional numerical field with the current count value to the event (see the sketch below).
+Events are only emitted when the counter changes.
+Runtime Name: countField
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter.md
new file mode 100644
index 000000000..07ccaf2ff
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter.md
@@ -0,0 +1,51 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.booloperator.inverter
+title: Boolean Inverter
+sidebar_label: Boolean Inverter
+---
+
+
+
+
+
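+A minimal sketch of the flank-counting logic described above (plain Python; the class name and emission format are illustrative assumptions):
+
+```python
+class BooleanCounter:
+    """Counts configured flanks (state changes) of a boolean signal."""
+
+    def __init__(self, mode: str = "BOTH"):  # "TRUE->FALSE", "FALSE->TRUE", "BOTH"
+        self.mode = mode
+        self.last = None
+        self.count = 0
+
+    def process(self, value: bool):
+        prev, self.last = self.last, value
+        changed = prev is not None and value != prev
+        rising = changed and value        # False -> True
+        falling = changed and not value   # True -> False
+        if (self.mode == "BOTH" and changed) or \
+           (self.mode == "FALSE->TRUE" and rising) or \
+           (self.mode == "TRUE->FALSE" and falling):
+            self.count += 1
+            return {"countField": self.count}
+        return None  # no output when the counter does not change
+
+bc = BooleanCounter("FALSE->TRUE")
+for v in [False, True, True, False, True]:
+    print(bc.process(v))  # None, {'countField': 1}, None, None, {'countField': 2}
+```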

+ +

+
+***
+
+## Description
+
+This processor requires a boolean value in the data stream and inverts its value (e.g., true -> false).
+
+***
+
+## Required input
+
+### Boolean Field
+
+The boolean value to be inverted.
+
+***
+
+## Configuration
+No further configuration required.
+
+## Output
+The output schema is the same as the input schema. Only the value of the property is changed.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.logical.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.logical.md
new file mode 100644
index 000000000..9cf0d921b
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.logical.md
@@ -0,0 +1,42 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.booloperator.logical
+title: Boolean Logical Operator
+sidebar_label: Boolean Logical Operator
+---
+
+
+
+
+## Description
+
+This processor performs a logical boolean operation between the values of a set of properties.
+A user can select the type of operation and the set of properties.
+
+***
+
+## Required input
+
+The type of logical boolean operator and the set of properties on which the operator should be applied.
+
+***
+
+## Output
+
+Outputs the incoming event while appending the result (``boolean-operations-result``) to the incoming event.
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping.md
new file mode 100644
index 000000000..ff3536006
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping.md
@@ -0,0 +1,70 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping
+title: Measure Time Between Two Sensors
+sidebar_label: Measure Time Between Two Sensors
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor can be used to measure the time between two boolean sensors,
+for example on a conveyor where one sensor is placed on the left and one sensor on the right.
+Parts are transported on the conveyor and the sensors are boolean sensors detecting those parts.
+The time between the two sensors is measured, and the number of completed transports is counted.
+The measurement is initialized once the left sensor is true and stopped once the right sensor is true.
+There can also be multiple parts on the conveyor at once, as long as the order of the individual parts does not change.
+
+

+ +

+
+***
+
+## Required input
+Requires two boolean fields in the data stream.
+
+### Left Field
+The left field starts the timer when its value is true.
+
+### Right Field
+The right field stops the timer and emits the event when its value is true.
+
+***
+
+## Configuration
+No further configuration is required.
+
+## Output
+Appends two fields to the input event (a sketch of the measurement logic follows below).
+
+### Timer Field
+The timer field is a numeric value representing the time between the two sensors. Runtime name: measured_time
+
+### Counter
+The counter indicates how many events were emitted by this component. Runtime name: counter
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer.md
new file mode 100644
index 000000000..044810417
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer.md
@@ -0,0 +1,58 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.booloperator.timer
+title: Boolean Timer
+sidebar_label: Boolean Timer
+---
+
+
+
+
+
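+A minimal sketch of the time measurement described above (plain Python; the FIFO-queue approach and field names are illustrative assumptions):
+
+```python
+from collections import deque
+
+class TimeBetweenSensors:
+    """Measures the time a part needs from the left to the right sensor.
+    A FIFO queue allows several parts on the conveyor at once, as long as
+    their order does not change."""
+
+    def __init__(self):
+        self.starts = deque()
+        self.counter = 0
+
+    def process(self, timestamp_ms: int, left: bool, right: bool):
+        if left:
+            self.starts.append(timestamp_ms)  # a part passed the left sensor
+        if right and self.starts:
+            self.counter += 1
+            return {"measured_time": timestamp_ms - self.starts.popleft(),
+                    "counter": self.counter}
+        return None
+
+t = TimeBetweenSensors()
+t.process(1000, left=True, right=False)
+t.process(1500, left=True, right=False)
+print(t.process(4000, left=False, right=True))  # {'measured_time': 3000, 'counter': 1}
+print(t.process(4600, left=False, right=True))  # {'measured_time': 3100, 'counter': 2}
+```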

+ +

+
+***
+
+## Description
+
+This processor measures how long a boolean value does not change.
+Once the value changes, an event with the measured time is emitted.
+
+
+***
+
+## Required input
+
+A boolean value is required in the data stream.
+
+### Field
+
+The boolean field which is monitored for state changes.
+
+***
+
+## Configuration
+
+### Timer value
+Define whether it should be measured how long the value is true or how long the value is false.
+
+## Output
+Appends a field with the time for which the value did not change.
+The event is emitted when the boolean value changes. Runtime name: measured_time
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.changed-value.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.changed-value.md
new file mode 100644
index 000000000..9b5f1ac10
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.changed-value.md
@@ -0,0 +1,46 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.changed-value
+title: Value Changed
+sidebar_label: Value Changed
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor sends out an event every time a specific field changes.
+It also adds a timestamp in milliseconds, taken from the system time.
+
+***
+
+## Configuration
+Select the property to monitor for changes.
+
+## Output
+Emits an event on each change and appends a timestamp indicating when the change occurred.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.count-array.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.count-array.md
new file mode 100644
index 000000000..b32bb7823
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.count-array.md
@@ -0,0 +1,55 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.count-array
+title: Count Array
+sidebar_label: Count Array
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor takes a list field, computes the size of the list and appends the result to the event.
+
+***
+
+## Required input
+
+This processor works with any event that has a field of type ``list``.
+
+***
+
+## Configuration
+
+### List Field
+
+The field containing the list that should be used.
+
+## Output
+
+Outputs the incoming event while appending the list size (named ``countValue``) to the incoming event.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata.md
new file mode 100644
index 000000000..9c4871a5d
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata.md
@@ -0,0 +1,77 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.csvmetadata
+title: CSV Metadata Enricher
+sidebar_label: CSV Metadata Enricher
+---
+
+
+
+Enriches a data stream with information provided in a CSV file.
+The rows of the CSV file are matched by an id column against a string property of the data stream.
+
+***
+
+## Description
+Upload a CSV file with static meta information that will be appended to each event.
+The file can contain different information for different keys in the stream.
+
+
+### Structure of CSV file
+The first row contains the runtime names for the properties to insert.
+Once the file is uploaded, the user can select which column to use for the matching property
+and which values should be appended.
+Delimiter: ';'
+
+
+***
+
+## Example
+Add the location of a production line to the event (a sketch of this lookup logic follows below).
+
+### Input event
+```
+{
+  'line_id': 'line1',
+  'timestamp': 1586378041
+}
+```
+
+### CSV File
+```
+production_line;location
+line1;germany
+line2;uk
+line3;usa
+```
+
+### Configuration
+* The field that is used for the lookup (Example: line_id)
+* The CSV file (Example: Upload the csv file)
+* Field to match (Example: production_line)
+* Fields to append (Example: location)
+
+### Output event
+```
+{
+  'line_id': 'line1',
+  'timestamp': 1586378041,
+  'location': 'germany'
+}
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.datetime.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.datetime.md
new file mode 100644
index 000000000..4b237cd43
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.datetime.md
@@ -0,0 +1,78 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.datetime
+title: Datetime From String
+sidebar_label: Datetime From String
+---
+
+
+
+
+
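+A minimal sketch of the CSV lookup performed by the enricher above (plain Python using the standard csv module; function names are illustrative):
+
+```python
+import csv, io
+
+CSV_FILE = """production_line;location
+line1;germany
+line2;uk
+line3;usa
+"""
+
+def build_lookup(text, match_col, append_cols):
+    """Builds a lookup table: match column value -> fields to append."""
+    reader = csv.DictReader(io.StringIO(text), delimiter=";")
+    return {row[match_col]: {c: row[c] for c in append_cols} for row in reader}
+
+def enrich(event, field, lookup):
+    """Appends the matching metadata fields to the event."""
+    return {**event, **lookup.get(event[field], {})}
+
+lookup = build_lookup(CSV_FILE, "production_line", ["location"])
+print(enrich({"line_id": "line1", "timestamp": 1586378041}, "line_id", lookup))
+# {'line_id': 'line1', 'timestamp': 1586378041, 'location': 'germany'}
+```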

+ +

+
+***
+
+## Overview
+
+The "Datetime From String" processor is a handy tool that helps convert human-readable datetime information into a
+format that machines can understand. This is particularly useful when dealing with data that includes dates and times.
+
+### Why Use This Processor?
+
+In the context of event streams, you may encounter dates and times formatted for human readability but not necessarily
+optimized for computer processing. The "Datetime From String" processor addresses this by facilitating the conversion
+of human-readable datetime information within your continuous stream of events.
+
+*** 
+
+## How It Works
+
+When you input a data stream into this processor containing a datetime in a specific format (such as "2023-11-24 15:30:00"), it
+undergoes a transformation. The processor converts it into a computer-friendly format called a ZonedDateTime object.
+
+### Example
+
+Let's say you have an event stream with a property containing values like "2023-11-24 15:30:00" and you want to make
+sure your computer understands it. You can use this processor to convert it into a machine-friendly format.
+
+***
+
+## Getting Started
+
+To use this processor, you need one thing in your data:
+
+1. **Datetime String**: This is the name of the event property that contains the human-readable datetime string, like "2023-11-24 15:30:00".
+
+
+### Configuration
+
+The only thing you need to configure is the time zone.
+1. **Time Zone**: Specify the time zone that applies to your datetime if it doesn't already have this information. This ensures that the processor understands the context of your datetime.
+
+## Output
+
+After the conversion happens, the processor adds a new piece of information to your data stream:
+
+* **timestringInMillis**: This is the transformed datetime in a format that computers can easily work with (UNIX timestamp in milliseconds).
+* **timeZone**: The name of the timezone the `dateTime` value refers to. Can be used to reconstitute the actual time.
+
+A minimal sketch of this conversion follows below.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.duration-value.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.duration-value.md
new file mode 100644
index 000000000..0d1a66b2e
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.duration-value.md
@@ -0,0 +1,51 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.duration-value
+title: Calculate Duration
+sidebar_label: Calculate Duration
+---
+
+
+
+
+
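+A minimal Python sketch of the conversion described above (the exact input format accepted by the processor is not specified here; "%Y-%m-%d %H:%M:%S" is an assumption for illustration):
+
+```python
+from datetime import datetime
+from zoneinfo import ZoneInfo
+
+def to_millis(datetime_string: str, time_zone: str):
+    """Parses a human-readable datetime string and returns a UNIX
+    timestamp in milliseconds plus the applied time zone."""
+    dt = datetime.strptime(datetime_string, "%Y-%m-%d %H:%M:%S")
+    dt = dt.replace(tzinfo=ZoneInfo(time_zone))  # attach the configured zone
+    return {"timestringInMillis": int(dt.timestamp() * 1000), "timeZone": time_zone}
+
+print(to_millis("2023-11-24 15:30:00", "Europe/Berlin"))
+# {'timestringInMillis': 1700836200000, 'timeZone': 'Europe/Berlin'}
+```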

+ +

+
+***
+
+## Description
+
+This processor calculates the duration between a start timestamp and an end timestamp in a given stream.
+
+***
+
+## Required input
+Two timestamp fields.
+
+***
+
+## Configuration
+
+* Start Timestamp: The first timestamp (t1)
+* End Timestamp: The second timestamp (t2)
+* Time Unit of the result
+
+## Output
+Appends a new field with the difference of t2 and t1.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.field-mapper.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.field-mapper.md
new file mode 100644
index 000000000..b903eb206
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.field-mapper.md
@@ -0,0 +1,74 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.field-mapper
+title: Field Mapper
+sidebar_label: Field Mapper
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Replaces one or more fields with a new field and computes a hash value of these fields.
+
+***
+
+## Configuration
+
+* Fields: Fields that will be mapped into a property
+* Name of the new field
+
+***
+
+## Example
+
+Merge two fields into a hash value (a sketch of this transformation follows below).
+
+### Input event
+
+```
+{
+  "timestamp":1586380104915,
+  "mass_flow":4.3167,
+  "temperature":40.05,
+  "sensorId":"flowrate01"
+}
+```
+
+### Configuration
+
+* Fields: mass_flow, temperature
+* Name of new field: demo
+
+### Output event
+
+```
+{
+  "timestamp":1586380104915,
+  "sensorId":"flowrate01",
+  "demo":"8ae11f5c83610104408d485b73120832"
+}
+```
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.fieldhasher.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.fieldhasher.md
new file mode 100644
index 000000000..b4b0d6903
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.fieldhasher.md
@@ -0,0 +1,55 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.fieldhasher
+title: Field Hasher
+sidebar_label: Field Hasher
+---
+
+
+
+
+
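+A sketch of the field-merging step in plain Python. The exact hash function and concatenation scheme used by the processor are not documented here; MD5 over the concatenated string values is an assumption, so the resulting hash may differ from the example output above:
+
+```python
+import hashlib
+
+def map_fields(event, fields, new_field):
+    """Removes the given fields from the event and replaces them with a
+    single hash field computed over their concatenated values."""
+    values = "".join(str(event.pop(f)) for f in fields)
+    event[new_field] = hashlib.md5(values.encode()).hexdigest()
+    return event
+
+event = {"timestamp": 1586380104915, "mass_flow": 4.3167,
+         "temperature": 40.05, "sensorId": "flowrate01"}
+print(map_fields(event, ["mass_flow", "temperature"], "demo"))
+# {'timestamp': 1586380104915, 'sensorId': 'flowrate01', 'demo': '...'}
+```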

+ +

+
+***
+
+## Description
+
+The Field Hasher encodes the values of a field using a hash algorithm.
+It can use MD5, SHA1 or SHA2 to hash field values.
+
+***
+
+## Required input
+This processor requires at least one field of type string.
+
+***
+
+## Configuration
+
+### Field
+Specifies the string field that will be encoded.
+
+### Hash Algorithm
+Specifies the algorithm used to encode the string field. The following algorithms
+are available: SHA2, MD5 or SHA1.
+
+## Output
+The encoded string field.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.fieldrename.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.fieldrename.md
new file mode 100644
index 000000000..8d8e463e3
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.fieldrename.md
@@ -0,0 +1,59 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.fieldrename
+title: Field Renamer
+sidebar_label: Field Renamer
+---
+
+
+
+
+***
+
+## Description
+
+Replaces the runtime name of an event property with a custom defined name.
+Useful for data ingestion purposes where a specific event schema is required.
+
+
+***
+
+### OldFieldName
+Specifies the field to rename.
+
+### NewFieldName
+Specifies the new runtime name of the field.
+
+## Output
+Example:
+
+Old output:
+```
+{
+  'timestamp': 16003000
+}
+```
+
+New output:
+```
+{
+  'time': 16003000
+}
+```
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.measurementunitconverter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.measurementunitconverter.md
new file mode 100644
index 000000000..303309c42
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.measurementunitconverter.md
@@ -0,0 +1,53 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.measurementunitconverter
+title: Measurement Unit Converter
+sidebar_label: Measurement Unit Converter
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Converts a unit of measurement to another one.
+
+***
+
+## Required input
+
+A numerical field with a measurement unit.
+
+***
+
+## Configuration
+
+### Field
+
+The numerical field whose measurement unit should be converted.
+
+### Target unit
+
+The unit of measurement the value should be converted to.
+
+## Output
+
+Outputs the input event with the selected field converted to the target unit.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge.md
new file mode 100644
index 000000000..09c0a4489
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge.md
@@ -0,0 +1,58 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge
+title: Signal Edge Filter
+sidebar_label: Signal Edge Filter
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Observes a boolean value and forwards the event when a signal edge is detected.
+
+***
+
+## Required input
+
+### Boolean Field
+The boolean field that is observed.
+
+***
+
+## Configuration
+### Kind of edge
+* Detect rising edges
+* Detect falling edges
+* Detect both
+
+### Delay
+Defines for how many events the signal must be stable before the result is emitted.
+(E.g., if set to 2, the result is not emitted if the value toggles between true and false;
+it fires once two consecutive events with the new value are detected after the flank.)
+
+## Output
+Emits the input event when the signal edge is detected (see the sketch below).
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state.md
new file mode 100644
index 000000000..7eb96ae8b
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state.md
@@ -0,0 +1,63 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state
+title: Boolean To State
+sidebar_label: Boolean To State
+---
+
+
+
+
+
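+A minimal sketch of edge detection with a stability delay, as described above (plain Python, simplified to rising edges only; the class name and return value are illustrative assumptions):
+
+```python
+class EdgeFilter:
+    """Forwards an event when a rising edge is followed by `delay`
+    consecutive stable values."""
+
+    def __init__(self, delay: int):
+        self.delay = delay
+        self.last = None
+        self.stable = 0
+
+    def process(self, value: bool):
+        if self.last is False and value:   # rising flank detected
+            self.stable = 1
+        elif value and self.stable > 0:    # still stable after the flank
+            self.stable += 1
+        elif not value:
+            self.stable = 0                # toggled back -> discard the flank
+        self.last = value
+        return "emit" if self.stable == self.delay else None
+
+ef = EdgeFilter(delay=2)
+print([ef.process(v) for v in [False, True, False, True, True]])
+# [None, None, None, None, 'emit']
+```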

+ +

+
+***
+
+## Description
+
+Converts boolean fields to a state string representing the current state of the system.
+This processor requires one or multiple boolean values in the data stream.
+For the selected field whose value is true, the runtime name is added as the state field.
+***
+
+## Required input
+
+### Boolean Fields
+
+Boolean fields that are converted to the state when true.
+
+### Default State
+
+When all boolean values are false, a default state can be defined.
+
+### Mapping Configuration
+
+Configuration to provide a string mapping for each possible value.
+On the left is the value of the runtime name and on the right the new value (e.g. {"runtimeName": "newValue"}).
+
+***
+
+## Configuration
+
+No further configuration required.
+
+## Output
+
+The output contains a new field with the string value of the state.
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number.md
new file mode 100644
index 000000000..021bee3c8
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number.md
@@ -0,0 +1,58 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number
+title: Number Labeler
+sidebar_label: Number Labeler
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+Applies a rule to the value of a field (e.g., when the value is lower than 10, add the label `nok`, else add the label `ok`).
+
+***
+
+## Required input
+
+Requires a sensor value.
+
+### Sensor value
+
+A number representing the current sensor value.
+
+***
+
+## Configuration
+
+### Condition
+Define a rule for which label to add. Example: `<;5;nok` means that when the value is smaller than 5, the label `nok` is added.
+The default label can be defined with `*;ok`.
+The first rule that is true defines the label. Rules are applied in the same order as defined here.
+
+
+## Output
+Appends a new field with the label defined in the condition configuration (see the sketch below).
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.staticmetadata.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.staticmetadata.md
new file mode 100644
index 000000000..6400ed659
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.staticmetadata.md
@@ -0,0 +1,74 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.processor.staticmetadata
+title: Static Metadata Enricher
+sidebar_label: Static Metadata Enricher
+---
+
+
+
+Enrich a data stream by dynamically adding fields based on user-provided static metadata configuration.
+
+---
+
+## Description
+
+The Static Metadata Enricher is designed to enrich a data stream by dynamically adding fields based on user-provided
+metadata configuration. Users can specify static properties, and the processor will process each event, adding fields
+according to the provided key-value pairs. The output strategy is determined dynamically based on the provided metadata.
+For added convenience, users also have the option of uploading a CSV file with metadata information.
+
+### Configuration
+
+For each metadata entry, configure the following three options:
+
+- **Runtime Name:** A unique identifier for the property during runtime.
+- **Value:** The value associated with the property.
+- **Data Type:** The data type of the property value.
+
+#### Using CSV Option
+
+Alternatively, you can utilize the CSV upload feature by creating a CSV file with the following format:
+
+```
+Runtime Name,Runtime Value,Data Type
+sensorType,Temperature,String
+maxSensorValue,100.0,Float
+minSensorValue,0,Float
+```
+
+## Example
+### Input Event
+
+```json
+{
+  "reading": 25.5
+}
+```
+
+### Output Event
+
+```json
+{
+  "reading": 25.5,
+  "sensorType": "Temperature",
+  "maxSensorValue": 100.0,
+  "minSensorValue": 0.0
+}
+```
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state.md
new file mode 100644
index 000000000..02fbfa2cb
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state.md
@@ -0,0 +1,51 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state
+title: String To State
+sidebar_label: String To State
+---
+
+
+
+
+
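+A minimal sketch of the rule evaluation used by the Number Labeler above (plain Python; the rule syntax `op;threshold;label` is taken from the documentation, while the supported operator set is an assumption):
+
+```python
+import operator
+
+OPS = {"<": operator.lt, ">": operator.gt, "<=": operator.le,
+       ">=": operator.ge, "==": operator.eq}
+
+def apply_rules(value, rules):
+    """Applies op;threshold;label rules in order; *;label is the default."""
+    for rule in rules:
+        parts = rule.split(";")
+        if parts[0] == "*":
+            return parts[1]
+        op, threshold, label = parts
+        if OPS[op](value, float(threshold)):
+            return label  # the first matching rule wins
+    return None
+
+rules = ["<;5;nok", "*;ok"]
+print(apply_rules(3.2, rules))  # nok
+print(apply_rules(7.0, rules))  # ok
+```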

+ +

+
+***
+
+## Description
+
+Converts string fields to a state representing the current state of the system.
+This processor requires one or multiple string values in the data stream.
+Each of the selected values is added to the state array.
+***
+
+## Required input
+
+### String Fields
+String fields that are added to the state array.
+
+***
+
+## Configuration
+No further configuration required.
+
+## Output
+The output contains a new field with the string values of the state.
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor.md
new file mode 100644
index 000000000..6aa99cfed
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor.md
@@ -0,0 +1,58 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor
+title: Timestamp Extractor
+sidebar_label: Timestamp Extractor
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor extracts a timestamp into individual time fields (e.g., day field, hour field, ...).
+
+***
+
+## Required input
+
+This processor requires an event that provides a timestamp value (a field that is marked to be of type ``http://schema.org/DateTime``).
+
+***
+
+## Configuration
+
+### Timestamp Field
+
+The field of the event containing the timestamp to parse.
+
+### Extract Fields
+
+Select the individual parts of the timestamp that should be extracted, e.g., Year, Minute and Day.
+
+## Output
+
+The output of this processor is a new event that contains the fields selected by the ``Extract Fields`` parameter.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.round.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.round.md
new file mode 100644
index 000000000..3913f9fe5
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.round.md
@@ -0,0 +1,72 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.round
+title: Numeric Rounding
+sidebar_label: Numeric Rounding
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor rounds numeric values to the given decimal places.
+It supports multiple rounding strategies.
+
+***
+
+## Required input
+
+This processor requires an event that provides numerical properties.
+
+***
+
+## Configuration
+
+### Fields to Be Rounded
+
+Select which fields of the event should be rounded.
+
+### Number of Digits
+
+Specify the number of digits after the decimal point to round/keep, e.g., if the number is 2.8935 and 'digits' is 3,
+the result will be 2.894.
+
+### Mode of Rounding
+
+Specify the mode of rounding.
+Supported rounding modes:
+* `UP`: Rounding mode to round away from zero. Always increments the digit prior to a non-zero discarded fraction. Note that this rounding mode never decreases the magnitude of the calculated value.
+* `DOWN`: Rounding mode to round towards zero. Never increments the digit prior to a discarded fraction (i.e., truncates). Note that this rounding mode never increases the magnitude of the calculated value.
+* `CEILING`: Rounding mode to round towards positive infinity. If the result is positive, behaves as for `UP`; if negative, behaves as for `DOWN`. Note that this rounding mode never decreases the calculated value.
+* `FLOOR`: Rounding mode to round towards negative infinity. If the result is positive, behaves as for `DOWN`; if negative, behaves as for `UP`. Note that this rounding mode never increases the calculated value.
+* `HALF_UP`: Rounding mode to round towards "nearest neighbor" unless both neighbors are equidistant, in which case round up. Behaves as for `UP` if the discarded fraction is ≥ 0.5; otherwise, behaves as for `DOWN`.
+* `HALF_DOWN`: Rounding mode to round towards "nearest neighbor" unless both neighbors are equidistant, in which case round down. Behaves as for `UP` if the discarded fraction is > 0.5; otherwise, behaves as for `DOWN`.
+* `HALF_EVEN`: Rounding mode to round towards the "nearest neighbor" unless both neighbors are equidistant, in which case, round towards the even neighbor. Behaves as for `HALF_UP` if the digit to the left of the discarded fraction is odd; behaves as for `HALF_DOWN` if it's even. Note that this is the rounding mode that statistically minimizes cumulative error when applied repeatedly over a sequence of calculations.
+
+## Output
+
+The output of this processor is the same event with the fields selected by the ``Fields to Be Rounded`` parameter rounded
+to ``Number of Digits`` digits (see the sketch below).
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.split-array.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.split-array.md
new file mode 100644
index 000000000..47e0b6d08
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.split-array.md
@@ -0,0 +1,60 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.split-array
+title: Split Array
+sidebar_label: Split Array
+---
+
+
+
+
+
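+The listed rounding modes mirror Java's RoundingMode; Python's decimal module offers equivalents, which the sketch below uses to illustrate their behavior (the helper function is illustrative):
+
+```python
+from decimal import Decimal, ROUND_UP, ROUND_DOWN, ROUND_HALF_UP, ROUND_HALF_EVEN
+
+def round_value(value: float, digits: int, mode) -> float:
+    """Rounds value to the given number of decimal places using the mode."""
+    exp = Decimal(1).scaleb(-digits)  # e.g. digits=3 -> Decimal('0.001')
+    return float(Decimal(str(value)).quantize(exp, rounding=mode))
+
+print(round_value(2.8935, 3, ROUND_HALF_UP))    # 2.894
+print(round_value(2.8935, 3, ROUND_DOWN))       # 2.893 (truncates)
+print(round_value(-2.8935, 3, ROUND_UP))        # -2.894 (away from zero)
+print(round_value(2.8925, 3, ROUND_HALF_EVEN))  # 2.892 (ties go to even)
+```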

+ +

+
+***
+
+## Description
+
+This processor takes an array of event properties and creates an event for each of them.
+Further properties of the event can be added to each element.
+
+***
+
+## Required input
+
+This processor works with any event that has a field of type ``list``.
+
+***
+
+## Configuration
+
+### Keep Fields
+
+Fields of the event that should be kept in each resulting event.
+
+### List field
+
+The name of the field that contains the list values that should be split.
+
+
+## Output
+
+This data processor produces an event with all fields selected by the ``Keep Fields`` parameter and all fields of the
+selected list field.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter.md
new file mode 100644
index 000000000..2b5b1a725
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter.md
@@ -0,0 +1,65 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.stringoperator.counter
+title: String Counter
+sidebar_label: String Counter
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor monitors a string field and counts how often the value of the string changes.
+A change is characterized by the value of the field before and the value after the change,
+which together form a pair.
+The processor keeps track of the counter for each pair.
+
+***
+
+## Required input
+
+A string field is required in the data stream and can be selected with the field mapping.
+
+### String Field
+
+The string field to be monitored.
+
+***
+
+## Configuration
+
+(no further configuration required)
+
+## Output
+
+The following three fields are appended to the event:
+
+* [counter] numerical field with the current count value for the given value pair
+* [change_from] the value of the string before the change
+* [change_to] the value of the string after the change
+
+The event is emitted whenever the value of the string field changes.
+
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer.md
new file mode 100644
index 000000000..6ce9c78ad
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer.md
@@ -0,0 +1,66 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.stringoperator.timer
+title: String Timer
+sidebar_label: String Timer
+---
+
+
+
+
+

+ +

+
+***
+
+## Description
+
+This processor measures how long the value of a string field does not change.
+Once the value changes, an event with the measured time and the corresponding string value is emitted.
+
+
+***
+
+## Required input
+
+A string field is required in the data stream.
+
+### Field
+
+The string field which is monitored for any value changes.
+
+
+***
+
+## Configuration
+
+### Output Frequency
+
+Define when an event should be emitted, either on each input event or just when the string value changes.
+
+## Output
+
+The following two fields are appended to the event:
+
+* [measured_time] the measured time during which the string value did not change
+* [field_value] the corresponding string value
+
+The event is emitted whenever the value of the string field changes.
+
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.taskduration.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.taskduration.md
new file mode 100644
index 000000000..245339056
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.taskduration.md
@@ -0,0 +1,50 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.taskduration
+title: Task Duration
+sidebar_label: Task Duration
+---
+
+
+***
+
+## Description
+
+This processor computes the duration of a task, i.e., of a field containing a task description. It outputs an event
+ every time this task value changes and computes the duration between the first occurrence of this task and the
+ current event. For instance, you can use this processor to calculate the time a specific process step requires.
+***
+
+## Required input
+
+A timestamp value and a field containing a task value are required.
+
+***
+
+## Configuration
+
+(no further configuration required)
+
+## Output
+
+Emits an event that contains the process step, built from the names of the first task identifier and the identifier
+ of the subsequent task. In addition, the duration is part of the output event, provided in milliseconds.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean.md
new file mode 100644
index 000000000..8195cb36b
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean.md
@@ -0,0 +1,53 @@
+---
+id: org.apache.streampipes.processors.transformation.jvm.transform-to-boolean
+title: Transform to boolean
+sidebar_label: Transform to boolean
+---
+
+
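+The conversion rules described below can be sketched in a few lines of plain Java. This is only an illustration of the accepted values, not the actual processor code; exact case handling in the real implementation may differ.
+
+```java
+public class ToBooleanSketch {
+
+    // Maps the accepted string and numeric representations to a boolean;
+    // anything else is treated as invalid input.
+    public static boolean toBoolean(Object value) {
+        String s = String.valueOf(value);
+        if (s.equalsIgnoreCase("true") || s.equals("1") || s.equals("1.0")) {
+            return true;
+        }
+        if (s.equalsIgnoreCase("false") || s.equals("0") || s.equals("0.0")) {
+            return false;
+        }
+        throw new IllegalArgumentException("Value cannot be converted to boolean: " + s);
+    }
+}
+```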

+ +

+
+***
+
+## Description
+
+This processor transforms numbers and strings to boolean values.
+
+
+***
+
+## Required input
+
+A string field with the values "true", "True", "false", "False", or a number field with the value 1.0, 1, 0, or 0.0.
+
+***
+
+## Configuration
+
+Select the fields that should be converted to boolean.
+
+## Output
+
+The selected properties of the input events are transformed to booleans.
+When a value is not valid, an error message is logged and the event is discarded.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest.md
new file mode 100644
index 000000000..d1ddcf1ac
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest.md
@@ -0,0 +1,58 @@
+---
+id: org.apache.streampipes.sinks.brokers.jvm.bufferrest
+title: Buffered REST Publisher
+sidebar_label: Buffered REST Publisher
+---
+
+
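+To illustrate the buffer-then-post behavior described below, here is a minimal sketch using only the JDK 11+ HTTP client. It is not the sink's actual implementation; the URL and buffer size are example parameters.
+
+```java
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.util.ArrayList;
+import java.util.List;
+
+public class BufferedRestSketch {
+
+    private final List<String> buffer = new ArrayList<>();
+    private final HttpClient client = HttpClient.newHttpClient();
+    private final String url;      // corresponds to the "REST URL" parameter
+    private final int bufferSize;  // corresponds to the "Buffer Size" parameter
+
+    public BufferedRestSketch(String url, int bufferSize) {
+        this.url = url;
+        this.bufferSize = bufferSize;
+    }
+
+    public void onEvent(String jsonEvent) throws Exception {
+        buffer.add(jsonEvent);
+        if (buffer.size() >= bufferSize) {
+            // Collect the buffered JSON events into one JSON array and POST it
+            String jsonArray = "[" + String.join(",", buffer) + "]";
+            HttpRequest request = HttpRequest.newBuilder(URI.create(url))
+                .header("Content-Type", "application/json")
+                .POST(HttpRequest.BodyPublishers.ofString(jsonArray))
+                .build();
+            client.send(request, HttpResponse.BodyHandlers.discarding());
+            buffer.clear();
+        }
+    }
+}
+```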

+ +

+ +*** + +## Description + +Collects a given amount of events into a JSON array. Once this event count is reached +the JSON array is posted to the given REST interface. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### REST URL + +The complete URL of the REST endpoint. + +### Buffer Size + +The amount of events before sending. + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.jms.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.jms.md new file mode 100644 index 000000000..9116688c9 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.jms.md @@ -0,0 +1,60 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.jms +title: JMS Publisher +sidebar_label: JMS Publisher +--- + + + + + +
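+For reference, this is roughly what publishing a single event over JMS looks like on the consumer-facing side, sketched here with the ActiveMQ 5 client library. Broker URL, topic name, and payload are example values matching the parameters described below, not values taken from the sink itself.
+
+```java
+import javax.jms.Connection;
+import javax.jms.MessageProducer;
+import javax.jms.Session;
+import javax.jms.Topic;
+import org.apache.activemq.ActiveMQConnectionFactory;
+
+public class JmsPublishSketch {
+    public static void main(String[] args) throws Exception {
+        // Broker URL and port as in the JMS Broker Settings example (tcp://localhost:61616)
+        ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory("tcp://localhost:61616");
+        Connection connection = factory.createConnection();
+        connection.start();
+        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
+        Topic topic = session.createTopic("my-streampipes-topic");
+        MessageProducer producer = session.createProducer(topic);
+        producer.send(session.createTextMessage("{\"temperature\": 21.5}"));
+        connection.close();
+    }
+}
+```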

+ +

+ +*** + +## Description + +Publishes events to a message broker (e.g., ActiveMQ) using the Java Message Service (JMS) protocol. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### JMS Broker Settings + +The basic settings to connect to the broker. +The JMS broker URL indicates the URL of the broker (e.g., tcp://localhost), the port indicates the port of the broker + (e.g., 61616) + + +### JMS Topic + +The topic where events should be sent to. + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.kafka.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.kafka.md new file mode 100644 index 000000000..3fa00b57e --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.kafka.md @@ -0,0 +1,61 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.kafka +title: Kafka Publisher +sidebar_label: Kafka Publisher +--- + + + + + +
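+As a rough sketch of what this sink does under the hood, the standard `kafka-clients` producer API can publish a JSON event like this; topic name and payload are illustrative only.
+
+```java
+import java.util.Properties;
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+
+public class KafkaPublishSketch {
+    public static void main(String[] args) {
+        Properties props = new Properties();
+        // Broker URL and port from the configuration (e.g., localhost:9092)
+        props.put("bootstrap.servers", "localhost:9092");
+        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
+        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
+            producer.send(new ProducerRecord<>("my-streampipes-topic", "{\"temperature\": 21.5}"));
+        }
+    }
+}
+```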

+ +

+ +*** + +## Description + +Publishes events to Apache Kafka. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### Kafka Broker Settings + +The basic settings to connect to the broker. +The Kafka broker URL indicates the URL of the broker (e.g., localhost), the port indicates the port of the broker + (e.g., 9092) + + +### Kafka Topic + +The topic where events should be sent to. + + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.mqtt.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.mqtt.md new file mode 100644 index 000000000..962082c26 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.mqtt.md @@ -0,0 +1,61 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.mqtt +title: MQTT Publisher +sidebar_label: MQTT Publisher +--- + + + + + +
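+A minimal sketch of the equivalent publish operation, here using the Eclipse Paho MQTT v3 client as an example library; broker URL, topic, and payload are assumptions for illustration.
+
+```java
+import org.eclipse.paho.client.mqttv3.MqttClient;
+import org.eclipse.paho.client.mqttv3.MqttMessage;
+
+public class MqttPublishSketch {
+    public static void main(String[] args) throws Exception {
+        // Broker URL and port as in the MQTT Broker Settings example (tcp://localhost:1883)
+        MqttClient client = new MqttClient("tcp://localhost:1883", MqttClient.generateClientId());
+        client.connect();
+        client.publish("my-streampipes-topic", new MqttMessage("{\"temperature\": 21.5}".getBytes()));
+        client.disconnect();
+    }
+}
+```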

+ +

+ +*** + +## Description + +Publishes events to MQTT. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### MQTT Broker Settings + +The basic settings to connect to the broker. +The MQTT broker URL indicates the URL of the broker (e.g., localhost), the port indicates the port of the broker +(e.g., 1883) + + +### MQTT Topic + +The topic where events should be sent to. + + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.nats.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.nats.md new file mode 100644 index 000000000..7f1c932f1 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.nats.md @@ -0,0 +1,78 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.nats +title: NATS Publisher +sidebar_label: NATS Publisher +--- + + + + + +
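+As a quick sketch of the publish operation this sink performs, the `jnats` client library can be used like this; the broker URL, subject, and payload are example values only.
+
+```java
+import io.nats.client.Connection;
+import io.nats.client.Nats;
+import java.nio.charset.StandardCharsets;
+
+public class NatsPublishSketch {
+    public static void main(String[] args) throws Exception {
+        // Broker URL as in the configuration example (nats://localhost:4222)
+        Connection nc = Nats.connect("nats://localhost:4222");
+        nc.publish("my-streampipes-subject",
+            "{\"temperature\": 21.5}".getBytes(StandardCharsets.UTF_8));
+        nc.close();
+    }
+}
+```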

+ +

+
+***
+
+## Description
+
+Publishes events to a NATS broker.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### NATS Subject
+
+The subject (topic) where events should be sent to.
+
+### NATS Broker URL
+
+The URL to connect to the NATS broker. Multiple URLs can be provided, separated by commas (,).
+ (e.g., nats://localhost:4222,nats://localhost:4223)
+
+### Username
+
+The username to authenticate the client with the NATS broker.
+
+It is an optional configuration.
+
+### Password
+
+The password to authenticate the client with the NATS broker.
+
+It is an optional configuration.
+
+### NATS Connection Properties
+
+All other possible connection configurations that the NATS client can be created with.
+They can be provided as key-value pairs separated by colons (:) and commas (,).
+ (e.g., io.nats.client.reconnect.max:1, io.nats.client.timeout:1000)
+
+It is an optional configuration.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.pulsar.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.pulsar.md
new file mode 100644
index 000000000..ea444d595
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.pulsar.md
@@ -0,0 +1,63 @@
+---
+id: org.apache.streampipes.sinks.brokers.jvm.pulsar
+title: Pulsar Publisher
+sidebar_label: Pulsar Publisher
+---
+
+
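+For orientation, a minimal publish with the official `pulsar-client` library looks like the sketch below; hostname, port, topic, and payload are illustrative assumptions matching the parameters described in this page.
+
+```java
+import org.apache.pulsar.client.api.Producer;
+import org.apache.pulsar.client.api.PulsarClient;
+
+public class PulsarPublishSketch {
+    public static void main(String[] args) throws Exception {
+        PulsarClient client = PulsarClient.builder()
+            .serviceUrl("pulsar://localhost:6650") // broker hostname and port
+            .build();
+        Producer<byte[]> producer = client.newProducer()
+            .topic("my-streampipes-topic")
+            .create();
+        producer.send("{\"temperature\": 21.5}".getBytes());
+        producer.close();
+        client.close();
+    }
+}
+```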

+ +

+ +*** + +## Description + +Publishes events to Apache Pulsar. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### Pulsar Broker Hostname + +The hostname to connect to the broker. + +### Pulsar Broker Port + +The port to connect to the broker (e.g., 6650) + + +### Pulsar Topic + +The topic where events should be sent to. + + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq.md new file mode 100644 index 000000000..0da1aa89f --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq.md @@ -0,0 +1,73 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.rabbitmq +title: RabbitMQ Publisher +sidebar_label: RabbitMQ Publisher +--- + + + + + +
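+A compact sketch of the corresponding publish call with the RabbitMQ `amqp-client` library; host, credentials, exchange, and topic are example values, and the exchange is assumed to already exist on the broker.
+
+```java
+import com.rabbitmq.client.Channel;
+import com.rabbitmq.client.Connection;
+import com.rabbitmq.client.ConnectionFactory;
+
+public class RabbitMqPublishSketch {
+    public static void main(String[] args) throws Exception {
+        ConnectionFactory factory = new ConnectionFactory();
+        factory.setHost("localhost");
+        factory.setPort(5672);
+        factory.setUsername("guest");
+        factory.setPassword("guest");
+        try (Connection connection = factory.newConnection();
+             Channel channel = connection.createChannel()) {
+            // Publish to the configured exchange using the topic as routing key
+            channel.basicPublish("my-exchange", "my-streampipes-topic", null,
+                "{\"temperature\": 21.5}".getBytes());
+        }
+    }
+}
+```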

+ +

+ +*** + +## Description + +Forwards events to a RabbitMQ broker + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### Host + +The hostname of the RabbitMQ broker. + +### Port + +The port of the RabbitMQ broker. + +### User + +The username used to connect to the RabbitMQ broker. + +### Password + +The password used to connect to the RabbitMQ broker. + +### Exchange Name + +The name of the exchange. + +### RabbitMQ Topic + +The topic where events should be sent to. + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rest.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rest.md new file mode 100644 index 000000000..71d792398 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rest.md @@ -0,0 +1,52 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.rest +title: REST Publisher +sidebar_label: REST Publisher +--- + + + + +

+ +

+ +*** + +## Description + +Posts a JSON representation of an event to a REST interface. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### REST URL + +The complete URL of the REST endpoint. + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rocketmq.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rocketmq.md new file mode 100644 index 000000000..a3e0e27b1 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.rocketmq.md @@ -0,0 +1,59 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.rocketmq +title: RocketMQ Publisher +sidebar_label: RocketMQ Publisher +--- + + + + + +

+ +

+ +*** + +## Description + +Publishes events to Apache RocketMQ. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### RocketMQ Endpoint + +The endpoint to connect to the broker. + + +### RocketMQ Topic + +The topic where events should be sent to. + + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.tubemq.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.tubemq.md new file mode 100644 index 000000000..34dc0222f --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.tubemq.md @@ -0,0 +1,61 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.tubemq +title: TubeMQ (InLong) Publisher +sidebar_label: TubeMQ (InLong) Publisher +--- + + + + + +

+ +

+ +*** + +## Description + +Publishes events to Apache TubeMQ (InLong). + +*** + +## Required Inputs + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### TubeMQ Master Information + +This field describes the endpoints of all the TubeMQ masters. + +The format should be like `ip1:port1,ip2:port2,ip3:port3`. + + +### TubeMQ Topic + +The topic where events should be sent to. + + +## Output + +(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.websocket.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.websocket.md new file mode 100644 index 000000000..80706526c --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.brokers.jvm.websocket.md @@ -0,0 +1,52 @@ +--- +id: org.apache.streampipes.sinks.brokers.jvm.websocket +title: Websocket Server +sidebar_label: Websocket Server +--- + + + + +*** + +

+ +

+ +## Description + +Send a message to a connected websocket client + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### Port + +The port on which the websocket listens for connections + +## Output + +(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.ditto.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.ditto.md new file mode 100644 index 000000000..0726af127 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.ditto.md @@ -0,0 +1,73 @@ +--- +id: org.apache.streampipes.sinks.databases.ditto +title: Eclipse Ditto +sidebar_label: Eclipse Ditto +--- + + + + + +

+ +

+ +*** + +## Description + +Forwards events to the Eclipse Ditto API. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### Fields to send + +The fields that should be stored as a property to Ditto endpoint. + +### Ditto API endpoint + +The endpoint URL of the Ditto instance. + +### Username + +The username to authenticate the Ditto endpoint. + +### Password + +The password to authenticate the Ditto endpoint. + +### Thing ID + +The Ditto thing ID. + +#### Feature ID + +The Ditto feature ID + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.couchdb.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.couchdb.md new file mode 100644 index 000000000..eb84223bb --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.couchdb.md @@ -0,0 +1,63 @@ +--- +id: org.apache.streampipes.sinks.databases.jvm.couchdb +title: CouchDB +sidebar_label: CouchDB +--- + + + + + +

+ +

+
+***
+
+## Description
+
+Stores events in an Apache CouchDB database.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### Hostname
+
+The hostname of the CouchDB instance.
+
+### Port
+
+The port of the CouchDB instance.
+
+### Database Name
+
+The name of the database where events will be stored.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.influxdb.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.influxdb.md
new file mode 100644
index 000000000..f712edec6
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.influxdb.md
@@ -0,0 +1,85 @@
+---
+id: org.apache.streampipes.sinks.databases.jvm.influxdb
+title: InfluxDB
+sidebar_label: InfluxDB
+---
+
+
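+As a sketch of the kind of write this sink performs, the `influxdb-java` client (for the InfluxDB 1.x line) can write a single point as shown below; URL, credentials, database, measurement, and field names are assumptions for the example.
+
+```java
+import java.util.concurrent.TimeUnit;
+import org.influxdb.InfluxDB;
+import org.influxdb.InfluxDBFactory;
+import org.influxdb.dto.Point;
+
+public class InfluxDbWriteSketch {
+    public static void main(String[] args) {
+        InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "user", "password");
+        influxDB.setDatabase("streampipes");
+        Point point = Point.measurement("flowrate")
+            .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS) // the configured timestamp field
+            .addField("mass_flow", 2.89)
+            .build();
+        influxDB.write(point);
+        influxDB.close();
+    }
+}
+```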

+ +

+
+***
+
+## Description
+
+Stores events in an InfluxDB.
+
+***
+
+## Required input
+
+This sink requires an event that provides a timestamp value (a field that is marked to be of type
+``http://schema.org/DateTime``).
+
+***
+
+## Configuration
+
+### Hostname
+
+The hostname/URL of the InfluxDB instance (include http(s)://).
+
+### Port
+
+The port of the InfluxDB instance.
+
+### Database Name
+
+The name of the database where events will be stored.
+
+### Measurement Name
+
+The name of the measurement where events will be stored (will be created if it does not exist).
+
+### Username
+
+The username for the InfluxDB server.
+
+### Password
+
+The password for the InfluxDB server.
+
+### Timestamp Field
+
+The field which contains the required timestamp.
+
+### Buffer Size
+
+Indicates how many events are written into a buffer before they are written to the database.
+
+### Maximum Flush
+
+The maximum waiting time (in ms) for the buffer to reach the buffer size before its events are written to the database.
+
+## Output
+
+(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.iotdb.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.iotdb.md
new file mode 100644
index 000000000..777126236
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.iotdb.md
@@ -0,0 +1,91 @@
+---
+id: org.apache.streampipes.sinks.databases.jvm.iotdb
+title: Apache IoTDB
+sidebar_label: Apache IoTDB
+---
+
+

+ +

+
+***
+
+## Description
+
+Stores events in an IoTDB database.
+
+Events will be stored in the timeseries `root.${Database Name}.${Device (Entity) Name}.${Event Key}`.
+
+Please refer to [https://iotdb.apache.org/](https://iotdb.apache.org/) for more information.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### Hostname
+
+The hostname of the IoTDB instance.
+
+### Port
+
+The port of the IoTDB instance (default 6667).
+
+### Username
+
+The username for the IoTDB server.
+
+### Password
+
+The password for the IoTDB server.
+
+### **Database Name**
+
+The name of the database where events will be stored (will be created if it does not exist).
+
+A database is a group of devices (entities). Users can create any prefix path as a database.
+
+### **Device (Entity) Name**
+
+The name of the device (entity) where events will be stored.
+
+A device (also called entity) is a piece of equipment with measurements in real scenarios. In IoTDB, all measurements should have
+their corresponding devices.
+
+### **Measurements**
+
+All keys of fields in an event will be automatically converted to suffixes of timeseries.
+
+A measurement is information captured by detection equipment in an actual scene; such equipment transforms the sensed information
+into an electrical signal or another desired form of information output and sends it to IoTDB.
+
+In IoTDB, all data and paths stored are organized in units of measurements.
+
+## Output
+
+(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.opcua.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.opcua.md
new file mode 100644
index 000000000..b2e9cda47
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.opcua.md
@@ -0,0 +1,65 @@
+---
+id: org.apache.streampipes.sinks.databases.jvm.opcua
+title: OPC-UA
+sidebar_label: OPC-UA
+---
+
+

+ +

+ +*** + +## Description + +Allows to write events to an OPC-UA server. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### Hostname + +The hostname of the OPC-UA server. + +### Port + +The port of the OPC-UA server. + +### Namespace Index + +The namespace index in which the node should be written + +### Node Id + +The node id of the resulting node + +### Number Mapping + +The property of the event that should be written to the OPC-UA server diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.postgresql.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.postgresql.md new file mode 100644 index 000000000..439f9bc65 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.postgresql.md @@ -0,0 +1,73 @@ +--- +id: org.apache.streampipes.sinks.databases.jvm.postgresql +title: PostgreSQL +sidebar_label: PostgreSQL +--- + + + + + +
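+To illustrate the kind of insert this sink performs, here is a minimal JDBC sketch; the table and column names are hypothetical, and the real sink derives them from the event schema.
+
+```java
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+
+public class PostgresWriteSketch {
+    public static void main(String[] args) throws Exception {
+        // Hostname, port (default 5432), and database name from the configuration
+        String url = "jdbc:postgresql://localhost:5432/streampipes";
+        try (Connection conn = DriverManager.getConnection(url, "user", "password");
+             PreparedStatement stmt = conn.prepareStatement(
+                 "INSERT INTO events (event_time, mass_flow) VALUES (?, ?)")) {
+            stmt.setLong(1, System.currentTimeMillis());
+            stmt.setDouble(2, 2.89);
+            stmt.executeUpdate();
+        }
+    }
+}
+```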

+ +

+ +*** + +## Description + +Stores events in a Postgres database. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### Hostname + +The hostname of the PostgreSQL instance. + +### Port + +The port of the PostgreSQL instance (default 5432). + +### Database Name + +The name of the database where events will be stored + +### Table Name + +The name of the table where events will be stored (will be created if it does not exist) + +### Username + +The username for the PostgreSQL Server. + +### Password + +The password for the PostgreSQL Server. + +## Output + +(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.redis.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.redis.md new file mode 100644 index 000000000..cee98444d --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.databases.jvm.redis.md @@ -0,0 +1,87 @@ +--- +id: org.apache.streampipes.sinks.databases.jvm.redis +title: Redis +sidebar_label: Redis +--- + + + + + +
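+As a rough sketch of a key-value write with an optional expiration, using the Jedis client library as an example; key, value, and expiration time are illustrative assumptions.
+
+```java
+import redis.clients.jedis.Jedis;
+
+public class RedisWriteSketch {
+    public static void main(String[] args) {
+        try (Jedis jedis = new Jedis("localhost", 6379)) {
+            // Key taken from the configured key field; value is the JSON event
+            jedis.set("sensor-4711", "{\"temperature\": 21.5}");
+            jedis.expire("sensor-4711", 3600L); // optional expiration time in seconds
+        }
+    }
+}
+```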

+ +

+
+***
+
+## Description
+
+Stores events in a Redis key-value store.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### Hostname
+The hostname of the Redis instance.
+
+### Port
+The port of the Redis instance (default 6379).
+
+### Key Field
+The runtime field to be used as the key when storing the event. If auto-increment is enabled, this setting will be ignored.
+
+### Auto Increment
+Enabling this will generate a sequential numeric key for every record inserted (note: enabling this will ignore the Key Field).
+
+### Expiration Time (Optional)
+The expiration time for a persisted event.
+
+### Password (Optional)
+The password for the Redis instance.
+
+### Connection Name (Optional)
+A connection name to assign to the current connection.
+
+### DB Index (Optional)
+The zero-based numeric index of the Redis database.
+
+### Max Active (Redis Pool) (Optional)
+The maximum number of connections that can be allocated from the pool.
+
+### Max Idle (Redis Pool) (Optional)
+The maximum number of connections that can remain idle in the pool.
+
+### Max Wait (Redis Pool) (Optional)
+The maximum number of milliseconds that the caller needs to wait when no connection is available.
+
+### Max Timeout (Redis Pool) (Optional)
+The maximum time for connection timeout and read/write timeout.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.internal.jvm.datalake.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.internal.jvm.datalake.md
new file mode 100644
index 000000000..44a091864
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.internal.jvm.datalake.md
@@ -0,0 +1,68 @@
+---
+id: org.apache.streampipes.sinks.internal.jvm.datalake
+title: Data Lake
+sidebar_label: Data Lake
+---
+
+

+ +

+
+***
+
+## Description
+
+Stores events in the internal data lake so that the data can be visualized in the live dashboard or in the data explorer.
+Simply create a pipeline with a data lake sink, switch to one of the data exploration tools and start exploring your
+data!
+
+***
+
+## Required input
+
+This sink requires an event that provides a timestamp value (a field that is marked to be of type
+``http://schema.org/DateTime``).
+
+***
+
+## Configuration
+
+### Identifier
+
+The name of the measurement (table) where the events are stored.
+
+### Schema Update Options
+
+The Schema Update Options dictate the behavior when encountering a measurement (table) with the same identifier.
+
+#### Option 1: Update Schema
+
+- **Description:** Overrides the existing schema.
+- **Effect on Data:** The data remains in the data lake, but accessing old data is restricted to file export.
+- **Impact on Features:** Other StreamPipes features, such as the Data Explorer, will only display the new event schema.
+
+#### Option 2: Extend Existing Schema
+
+- **Description:** Keeps old event fields in the event schema.
+- **Strategy:** This follows an append-only strategy, allowing continued work with historic data.
+- **Consideration:** Old properties may exist for which no new data is generated.
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.internal.jvm.notification.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.internal.jvm.notification.md
new file mode 100644
index 000000000..f29391c1c
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.internal.jvm.notification.md
@@ -0,0 +1,63 @@
+---
+id: org.apache.streampipes.sinks.internal.jvm.notification
+title: Notification
+sidebar_label: Notification
+---
+
+

+ +

+ +*** + +## Description + +Displays a notification in the UI panel of StreamPipes. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +### Notification Title + +The title of the notification. + +### Content + +The notification message. + +### Silent Period + +The *Silent Period* is the duration, expressed in minutes, during which notifications are temporarily disabled after one +has been sent. This feature is implemented to prevent overwhelming the target with frequent notifications, avoiding +potential spam behavior. + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.email.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.email.md new file mode 100644 index 000000000..72393be3f --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.email.md @@ -0,0 +1,73 @@ +--- +id: org.apache.streampipes.sinks.notifications.jvm.email +title: Email Notification +sidebar_label: Email Notification +--- + + + + + +

+ +

+ +*** + +## Description + +This sink sends an email to a specified receiver. + +Before you use this sink, the settings of your email server need to be configured. +After you've installed the element, navigate to ``Settings``, open the panel ``Sinks Notifications JVM`` and add your +mail server and credentials. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +The following configuration is required: + +### Receiver Address + +The email address of the receiver. + +### Subject + +The subject of the email. + +### Content + +The mail text. + +### Silent Period + +The *Silent Period* is the duration, expressed in minutes, during which notifications are temporarily disabled after one +has been sent. This feature is implemented to prevent overwhelming the target with frequent notifications, avoiding +potential spam behavior. + +## Output + +(not applicable for data sinks) \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.msteams.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.msteams.md new file mode 100644 index 000000000..2090564e0 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.msteams.md @@ -0,0 +1,86 @@ +--- +id: org.apache.streampipes.sinks.notifications.jvm.msteams +title: MS Teams Sink +sidebar_label: MS Teams Sink +--- + + + +# MS Teams Sink + +
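+As a minimal sketch of the simple message format described below, a plain JDK 11+ HTTP client can post a text payload to an incoming webhook; the webhook URL here is a placeholder, not a real endpoint.
+
+```java
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+
+public class TeamsWebhookSketch {
+    public static void main(String[] args) throws Exception {
+        String webhookUrl = "https://example.webhook.office.com/webhookb2/your-id"; // placeholder
+        String payload = "{\"text\": \"Hello from StreamPipes!\"}"; // simple message content
+        HttpRequest request = HttpRequest.newBuilder(URI.create(webhookUrl))
+            .header("Content-Type", "application/json")
+            .POST(HttpRequest.BodyPublishers.ofString(payload))
+            .build();
+        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
+    }
+}
+```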

+ +

+ +--- + + + +The MS Teams Sink is a StreamPipes data sink that facilitates the sending of messages to a Microsoft Teams channel +through a Webhook URL. Whether you need to convey simple text messages or employ more advanced formatting with [Adaptive +Cards](https://adaptivecards.io/), this sink provides a versatile solution for integrating StreamPipes with Microsoft Teams. + +--- + +## Required input + +The MS Teams Sink does not have any specific requirements for incoming event types. It is designed to work seamlessly +with any type of incoming event, making it a versatile choice for various use cases. + +--- + +## Configuration + +#### Webhook URL + +To configure the MS Teams Sink, you need to provide the Webhook URL that enables the sink to send messages to a specific +MS Teams channel. If you don't have a Webhook URL, you can learn how to create +one [here](https://learn.microsoft.com/en-us/microsoftteams/platform/webhooks-and-connectors/how-to/add-incoming-webhook?tabs=dotnet#create-incoming-webhooks-1). + +#### Message Content Options + +You can choose between two message content formats: + +- **Simple Message Content:** Supports plain text and basic markdown formatting. +- **Advanced Message Content:** Expects JSON input directly forwarded to Teams without modification. This format is + highly customizable and can be used for Adaptive Cards. + +Choose the format that best suits your messaging needs. + +### Silent Period + +The *Silent Period* is the duration, expressed in minutes, during which notifications are temporarily disabled after one +has been sent. This feature is implemented to prevent overwhelming the target with frequent notifications, avoiding +potential spam behavior. + +--- + +## Usage + +#### Simple Message Format + +In the simple message format, you can send plain text messages or utilize basic markdown formatting to convey +information. This is ideal for straightforward communication needs. + +#### Advanced Message Format + +For more sophisticated messaging requirements, the advanced message format allows you to send JSON content directly to +Microsoft Teams without modification. This feature is especially powerful when used +with [Adaptive Cards](https://learn.microsoft.com/en-us/adaptive-cards/), enabling interactive and dynamic content in +your Teams messages. diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.onesignal.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.onesignal.md new file mode 100644 index 000000000..0c4d6427d --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.onesignal.md @@ -0,0 +1,63 @@ +--- +id: org.apache.streampipes.sinks.notifications.jvm.onesignal +title: OneSignal +sidebar_label: OneSignal +--- + + + + + +

+ +

+
+***
+
+## Description
+
+This sink sends a push message to the OneSignal application.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### App Id
+
+The OneSignal application ID.
+
+### API Key
+
+The OneSignal API key.
+
+### Content
+
+The message that should be sent to OneSignal.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.slack.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.slack.md
new file mode 100644
index 000000000..7336200f6
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.slack.md
@@ -0,0 +1,66 @@
+---
+id: org.apache.streampipes.sinks.notifications.jvm.slack
+title: Slack Notification
+sidebar_label: Slack Notification
+---
+
+

+ +

+
+***
+
+## Description
+
+A Slack bot to send notifications directly into your Slack workspace.
+
+Before you use this sink, the Slack token needs to be configured.
+After you've installed the element, navigate to ``Settings``, open the panel ``Sinks Notifications JVM`` and add your
+Slack API token.
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### Receiver
+
+The receiver of the Slack message.
+
+### Channel Type
+
+The channel type; either "User" or "Channel".
+
+### Content
+
+The message that should be sent.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.telegram.md b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.telegram.md
new file mode 100644
index 000000000..e00bb8f65
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/pe/org.apache.streampipes.sinks.notifications.jvm.telegram.md
@@ -0,0 +1,70 @@
+---
+id: org.apache.streampipes.sinks.notifications.jvm.telegram
+title: Telegram Publisher
+sidebar_label: Telegram Publisher
+---
+
+
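+For orientation, sending a message through the Telegram Bot API boils down to one HTTP call to the `sendMessage` method; the API key and channel handle below are placeholders, and the JDK 11+ HTTP client is used only for illustration.
+
+```java
+import java.net.URI;
+import java.net.URLEncoder;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.nio.charset.StandardCharsets;
+
+public class TelegramSendSketch {
+    public static void main(String[] args) throws Exception {
+        String apiKey = "123456:ABC-DEF";  // placeholder for the key issued by @BotFather
+        String chatId = "@channel_name";   // channel handle or chat_id
+        String text = URLEncoder.encode("Hello from StreamPipes!", StandardCharsets.UTF_8);
+        String url = "https://api.telegram.org/bot" + apiKey
+            + "/sendMessage?chat_id=" + chatId + "&text=" + text;
+        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
+        HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
+    }
+}
+```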

+ +

+ +*** + +## Description + +Publisher to send notifications to a Telegram channel. + +In order to be able to do so, you will have first to: +* Create a Telegram public [channel](https://telegram.org/tour/channels). +> Private channels/groups: also supported. +* Create a Telegram BOT via [@BotFather](https://core.telegram.org/bots#3-how-do-i-create-a-bot) and get an API key. +* Set the bot as [administrator](https://www.wikihow.com/Make-Someone-an-Admin-on-Telegram) in your channel. + +*** + +## Required input + +This sink does not have any requirements and works with any incoming event type. + +*** + +## Configuration + +Describe the configuration parameters here. + +### Bot API Key + +The API Key generated by `@BotFather` when you created your bot. + +### Channel Name or Chat Id + +The handle name of your public channel (e.g. `@channel_name`). +> For private channels/groups: handle name only available for public channels. Use `chat_id` instead. + +### Content + +The message to be sent. + +## Output + +(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.95.1/user-guide-first-steps.md b/website-v2/versioned_docs/version-0.95.1/user-guide-first-steps.md new file mode 100644 index 000000000..b7ca89f62 --- /dev/null +++ b/website-v2/versioned_docs/version-0.95.1/user-guide-first-steps.md @@ -0,0 +1,209 @@ +--- +id: user-guide-first-steps +title: First steps +sidebar_label: First steps +--- + +In this section, you'll learn how to create and run pipelines in StreamPipes. +Before starting with this guide, all steps from the installation guide must be finished successfully and StreamPipes must be up and running. + +This guide consists of three steps: +First, you'll learn how to create your first pipeline using the interactive tutorial. +Afterwards, this section contains two examples on how to create pipelines based on the built-in data simulator. + +## Interactive Tutorial +The easiest way to get started with StreamPipes is the interactive tutorial. Once you open the pipeline editor for the first time, you'll see a splash screen as shown below. +Click **Start tour** to start the interactive tutorial, which will guide you through your first steps with StreamPipes. + +![Interactive Tutorial](/img/quickstart/interactive-tutorial.png) + +If the splash screen does not show up, make sure you've installed the following pipeline elements: +* Flow rate (data stream) +* Numerical Filter (processing element) +* Dashboard (data sink) + +After you've finished this very first tour, try the following tours to learn more about other features of StreamPipes: +* Open the Live Dashboard and start the dashboard tour (by clicking the school icon in the top menu bar) to learn how to create real-time visualizations +* Open StreamPipes Connect and start the tour to learn how to connect new data sources with StreamPipes. + +## First Examples + +In this tutorial, you will create two simple pipelines that demonstrate the basic functionality of StreamPipes. +The first example deals with monitoring a flow rate sensor. +This pipeline ensures that everything works properly and data is sent through the whole system. +In the second example we model a more complex situation detection pipeline that triggers a notification. +Let's start with our first pipeline. +If you have problems with any of the examples, please send us an email. +We are happy to help you. + +## Data Simulation +All the data sources that we have integrated in the StreamPipes demonstrator are simulated according to real world sensors. 
+For example, a flow rate sensor in a water pipe measures how much water flows through that pipe, and a water level sensor in a water tank reports how high the water level in the tank is.
+In the next section you will use the flow rate sensor to build your first pipeline.
+
+## Flow Rate Visualization
+In the first example, we create a live line chart to monitor the value of the mass flow from a flow rate sensor.
+
+### Create Pipeline
+* As a first step go to the pipeline editor
+* Select the **FRS1** (Flow Rate Source 1) source
+* Then navigate to the **Data Sink** tab
+* Select the **DS** (Dashboard Sink) and connect the source with the sink
+* After connecting the elements, save the pipeline by clicking on the save button in the top left corner
+* In the save menu, add the name *Flow Rate Monitoring* and the description *This is my very first pipeline!*
+* Also select the **Start pipeline immediately** checkbox
+* Then click the button **Save and go to pipeline view**
+* You are navigated to the pipeline view and a confirmation that the pipeline was started successfully should be shown
+
+
+
+### Create Visualization
+* After we have created the pipeline, we must create the line chart
+* Navigate to the **Live Dashboard** of StreamPipes
+* Click on the **Add visualization** button
+* Select the just created pipeline **Flow Rate Monitoring** and click the **next** button
+* For the visualization select the **Line Chart** and click **next** again
+* Now you have to enter the configuration for the line chart
+    * Select time mapping: **timestamp**
+    * Select number mapping: **mass_flow**
+    * Range Minimum: **0**
+    * Range Maximum: **10**
+* When all parameters are set correctly, click the next button again.
+* Congratulations, you created the first pipeline and should now see the line chart
+
+
+
+
+## Condition monitoring of a water tank
+In our second example, we are going to create a more complex pipeline.
+This pipeline has two sources, the flow rate sensor from the previous example and a source measuring the water level in a tank.
+Our goal is to send a notification when the flow rate stops and the water level in the water tank sinks too fast.
+In this case, a service technician should check the system as soon as possible.
+This example should just illustrate how the individual components work.
+Since the system currently uses simulated data, a notification is triggered each time the situation occurs.
+
+Now let's start!
+
+
+### Build the pipeline
+* First we have to select the **FRS1** (Flow Rate Sensor 1) and **WL1** (Water Level) from the sources tab
+* In a first step we want to detect when the flow rate stops
+* Use the **NF** (Numerical Filter) from the processing elements tab and connect it to the **FRS1** source
+* Configure the **Numerical Filter**:
+    * Field name for filter operator: **mass_flow**
+    * Filter Operation: **<**
+    * Threshold value: **1**
+
+
+
+* As a next step, we add an aggregation to the water level. This reduces the inaccuracies caused by the water moving in the tank.
+* Select the **A** (Aggregation) processing element
+* Connect the **WL1** with **A**
+* Configure **Aggregation**:
+    * Property Selection: **level**
+    * Operation: **Average**
+    * Time Window Size: **10**
+    * Output Frequency: **1**
+    * Group By: **None**
+
+
+
+* With the next processing element we check if the water level decreases too fast
+* Select **I** (Trend) and connect it to **A**
+* Configure **Trend**:
+    * Timestamp field: **timestamp**
+    * Value to observe: **aggregatedValue** (this value is created by the previous element)
+    * Group by: **sensorId**
+    * Increase / Decrease: **Decrease**
+    * Percentage of Increase / Decrease: **20**
+    * Time Window Length: **10**
+    * Select Output: **All**
+
+
+
+* Now we connect the two streams with the sequence element, which checks if both events occur within a certain time
+* Select **S** (Sequence) and connect both data streams to it
+* Configure **Sequence**:
+    * Time Window Size: **1**
+    * Time Unit: **sec**
+
+
+
+* Now we create a notification for the service technician that something is wrong with the system
+* Select **N** (Notification) from the data sink tab
+* Connect **S** with **N**
+* Configure **Notification**:
+    * Notification title: **Alarm**
+    * Content: **One notification was triggered by our first complex pipeline. Yeahhhh!**
+
+
+
+* Add the dashboard sink to the trend element to monitor the preliminary results
+* Select **DS** and connect it to **I**
+
+
+
+* Save the pipeline
+* Save configuration:
+    * Pipeline Name: **Second Pipeline**
+    * Description: **Complex monitoring rule**
+    * Start pipeline immediately: **CHECK**
+* Click **Save and go to pipeline view**
+* All pipeline elements should be started successfully
+* It can be seen that the elements run on different technologies, in a Flink and a Java container
+    * http://pe-flink-examples:8090
+    * http://pe-jvm-examples:8090
+* Go to the visualization view and create a **Raw Data** visualization for the new pipeline
+
+
+
+* Every time you see output in the **Raw Data** visualization of the new pipeline while the values in the **Line Chart** from the first example are zero, a notification is triggered by the pipeline.
+* Go to the **Notifications** view and have a look at the notification
+
+
+
+
+Congratulations, you finished the quick start!
+It is recommended to stop the last pipeline, because it will keep creating notifications ;)
+
+We hope we gave you an easy quick start into StreamPipes.
+If you have any questions or suggestions, just send us an email.
+From here on you can explore all features in the [User Guide](user-guide-introduction) or go to the [Developer Guide](extend-setup) to learn how to write your own StreamPipes processing elements.
+
diff --git a/website-v2/versioned_docs/version-0.95.1/user-guide-for-quickstart.md b/website-v2/versioned_docs/version-0.95.1/user-guide-for-quickstart.md
new file mode 100644
index 000000000..ec4166a2f
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/user-guide-for-quickstart.md
@@ -0,0 +1,134 @@
+---
+id: user-guide-for-quickstart
+title: Quickstart Tour
+sidebar_label: Quickstart Tour
+---
+
+Apache StreamPipes is a self-service Industrial IoT toolbox that enables non-technical users to connect, analyze and explore IoT data streams.
+
+Quickstart Mode, ideal for first-time users, allows users to interact with pre-set pipelines, dashboards, and data views to experience StreamPipes' functionality for IIoT.
+This page provides an overview of StreamPipes' features in Quickstart Mode, guiding users on how to utilize them effectively.
+
+:::info
+Do you have StreamPipes already running in Quickstart Mode? If not, simply run the commands `docker-compose -f docker-compose.quickstart.yml build script-runner` and `docker-compose -f docker-compose.quickstart.yml up -d` from the directory `installer/compose`.
+:::
+
+First of all, a user needs to log in to the system. The default login credentials are `admin@streampipes.apache.org` as username and `admin` as password.
+The credentials for the user are specified during the installation process.
+
+
+## Home
+After logging in, the user is redirected to the home page.
+The home page gives an overview of the different features available in the StreamPipes UI.
+
+On the left, the navigation menu can be seen.
+You can either use the icons on the left side or click on the menu icon on the
+top left to open the details view of the navigation menu.
+
+
+
+## Connect
+The _Connect_ view provides an overview of all existing adapters that let StreamPipes connect to data sources.
+
+With StreamPipes Connect it is possible to connect new data sources in StreamPipes with just a few clicks.
+It is also possible to connect specific data sources or generic sources like message brokers or databases.
+If the event schema of the data source is unknown, the system tries to infer the schema by extracting some sample data and analysing it.
+
+In Quickstart Mode, we use the Machine Data Simulator and Data Stream adapters to simulate factory machine condition data and environmental data; just click the `START ALL ADAPTERS` button. This starts all existing adapters to offer data sources for pipelines.
+
+
+
+
+## Pipelines
+The _Pipelines_ view provides an overview of all existing pipelines.
+
+A pipeline in Apache StreamPipes describes the transformation process from a data stream to a data sink. Typically, a pipeline consists of at least one data stream (or data set), zero or more data processors and at least one data sink.
+Existing pipelines can be managed using this pipeline view. For instance, users can start and stop pipelines or delete them when they are no longer needed.
+
+On the pipeline page, we can see that there are five pipelines; click the `START ALL ADAPTERS` button so that all existing adapters are running and offer data sources for these pipelines.
+
+
+
+
+In a smart factory, monitoring and analysing machine parameters is essential to ensure machine safety and minimize downtime. In industrial machine cooling systems or hydraulic machines, the water level can be an important monitoring parameter to ensure a machine's performance and safety.
+Using the `Water level trend analysis` pipeline as an example, clicking the `Show pipeline` button shows the details of the pipeline as in the following figure.
+
+
+
+The pipeline contains 3 major parts:
+- **Data source**:
+  - Machine Data Simulator (Water Level).
+- **Data Processors**:
+  - Trend Detector: analyses the speed of the water level increase within a specified time; when the Trend Detector finds that the speed is reaching the limit, it triggers the notification.
+  - Welford Change Detection: calculates the variance of the mean and the change of the water level over a specified time.
+  - Boolean Counter: counts each `overflow` status change from `false` to `true`.
+  - Numerical Filter: when the `overflow` status has changed from `false` to `true` five times, it triggers a notification.
+- **Data Sinks**:
+  - Notification: pushes a notification, sent to the Notification interface, when the measured parameter reaches the configured trigger condition.
+  - Data Lake: stores the events in the internal data lake, offering data for the dashboard and the data explorer.
+
+
+## Dashboard
+The _Dashboard_ can be used for multiple use cases.
+
+It is a good way to live-monitor a running system in a dashboard, and it can also be used during pipeline development to get fast feedback from newly created pipelines.
+Currently 10 different types of visualizations are available, including line charts, various map visualizations and data tables. The Dashboard feature allows you to select and view real-time data from specific pipelines that interest you.
+
+In Quickstart Mode, we offer three distinct dashboards, each designed for a specific application scenario. These dashboards include factory temperature monitoring, sensor running condition monitoring, and real-time machine data monitoring.
+
+
+
+As shown in the following figure, taking the `Smart Machine Monitor` as an example, the dashboard can showcase real-time data for monitoring machine parameters such as pressure and water level.
+
+
+
+## Data Explorer
+The _Data Explorer_ can be used for visualizing and exploring data streams that have been persisted using the Data Lake sink.
+
+Once your data has been stored in the data lake, you can navigate to the data explorer tab to craft a new data view with the widgets of your preference.
+In StreamPipes, a data view is a compilation of related widgets, which can be data visualizations or plots, all assigned to a specific date and time range. By default, this range encompasses the last 15 minutes of the current date and time. However, you also have the flexibility to choose from predefined ranges, such as a day or a month, or to customize the exact date and time range that you wish to investigate.
+
+In Quickstart Mode, we demonstrate StreamPipes' practicality and convenience in data analysis by providing example data views for specific application scenarios, including machine running condition monitoring, problem analysis, and factory environment monitoring.
+
+
+
+As shown in the following figure, taking the `Machine Running Condition Monitoring of Water Level and Pressure` data view as an example, the data explorer can show real-time machine data in different types of widgets, depending on what the user needs, to help analyse the data better.
+In the machine running condition scenario, we pay attention to the distribution of water level and pressure, as well as how the pressure changes.
+
+
+
+
+## Notifications
+The _Notifications_ view can be used for notifying someone when an urgent situation occurs.
+
+A notification can be created by using the notification sink in the pipeline editor.
+The message can be nicely configured in the message editor.
+It is also possible to embed values of the event that triggered the notification.
+All available properties are presented in the notification configurator.
+
+In Quickstart Mode, StreamPipes has several pre-defined notification triggers in the example pipelines. When you click the `Notification` button in the upper right corner, you can see the notification dialog.
+
+
+
+Congratulations! You've just finished the Quickstart user guide of StreamPipes.
+
diff --git a/website-v2/versioned_docs/version-0.95.1/user-guide-tour.md b/website-v2/versioned_docs/version-0.95.1/user-guide-tour.md
new file mode 100644
index 000000000..a6ae2f8ff
--- /dev/null
+++ b/website-v2/versioned_docs/version-0.95.1/user-guide-tour.md
@@ -0,0 +1,305 @@
+---
+id: user-guide-tour
+title: Tour
+sidebar_label: Tour
+---
+
+StreamPipes is a framework that enables domain experts to model and execute stream processing pipelines in a big data infrastructure.
+The graphical user interface of StreamPipes is a web application that provides an easy-to-use solution for domain experts.
+On this page, an overview of the many features of StreamPipes is given. We will tour through all features and explain what they do and how users can interact with them.
+
+First of all, a user needs to log in to the system.
+The credentials for the user are specified during the installation process.
+
+![StreamPipes Login](/img/features_0_62_0/login.png)
+
+## Home
+After logging in, the user is redirected to the home page.
+The home page gives an overview of the different features available in the StreamPipes UI.
+
+On the left, the navigation menu can be seen.
+You can either use the icons on the left side or click on the menu icon on the
+top left to open the details view of the navigation menu.
+
+On the top right, a link refers to the documentation and the logout button is present.
+
+
+
+
+## Pipeline Editor
+The first feature we are going to explain is the Pipeline Editor.
+This is one of the central features of StreamPipes, since graphical modeling of pipelines takes place in this view.
+
+On the top we can see four tabs: __Data Sets__, __Data Streams__, __Processing Elements__, and __Data Sinks__.
+Each tab contains multiple pipeline elements, which can be installed at runtime.
+The installation of pipeline elements is explained later in section [Install Pipeline Elements](user-guide-tour.md#install-pipeline-elements).
+
+
+
+There are multiple ways to search for a pipeline element.
+The easiest way is to enter a search term in the search field in the top left corner.
+The system filters the elements according to the search term and only presents the relevant ones.
+Another way is to select one of the categories from the drop-down menu.
+The system then filters the elements according to the selected category.
+The category of an element is defined by its functionality.
+
+
+
+Modelling of a pipeline starts with choosing one or more data sources. Therefore, a data stream or data set must be selected
+and moved into the editor via drag and drop.
+After adding a data stream, we can select a processing element to transform the events of the data stream.
+This is done again by dragging the processing element icon into our editor.
+The mouse can be used to connect the two elements.
+It is automatically checked in the background if the two elements are semantically compatible.
+If this check is successful, a configuration menu is shown. It contains the parameters that can be modified by the
+user. For all parameters, a description is provided and the system also prevents the user from entering parameters that
+are not correct, according to the semantic description of the element.
+It is also ensured that all required parameters are provided by the user, otherwise an error message is displayed.
+
+
+
+When the user tries to connect two elements that are not compatible, the system shows a __connection error__ message.
+The example illustrated below shows such a case.
The user tried to connect a text filter to a light sensor. +This is not possible since the text filter processing element requires at least one event property of type string, which is not provided by the light sensor. + +![Connection Error](/img/features_0_62_0/editor/10_connection_error.png) + +To further improve the usability, multiple ways are available to connect new elements besides the drag and drop option. +Each processing element has multiple short-cut buttons to connect it with another element. +The first one (eye icon) can be used to get a suggestion of all elements that are compatible with the current element. +The second one (plus icon) gives a recommendation on the elements the user might want to connect, based on the usage of the component in +other pipelines. +There is also a button (question tag icon) to open the documentation of a selected element. +Elements can be deleted by clicking the 'delete' button. +Each element can also be re-configured at a later point in time using the configuration button. +There is one important aspect about re-configuration of pipeline elements you need to be aware of: Only elements that are not yet connected to another element can be modified. +The reason for this behaviour is that some of the following elements might rely on the configuration of previous elements. +This way it is ensured that the user can not change the behaviour of the pipeline by accident. + + + + + +After the pipeline is completely modelled, the editor looks similar to the first image below. +Especially for larger pipelines, the auto layout button in the editor menu might be helpful. +With this button, the pipeline will be beautifully aligned in the editor, helping users to get a better overview of the complete pipeline. +On the top left corner of the editor, the 'save' button can be found. +After the modelling of the pipeline is done, use this button to save and execute the pipeline. +A save dialogue pops up when clicking the save button. +The pipeline title must be entered and an additional description can be provided. +It is recommended to always provide a description, because it makes it easier for other users to understand the meaning of the pipeline. +In the save menu, the user can either just store the pipeline configuration or store it and immediately start the pipeline. +Once the pipeline is executed, the user is redirected to the _Manage Pipeline_ view. +In this view, the user gets immediate feedback whether all components did start correctly and the pipeline is up and running. +This view also shows that individual elements might run in different environments on different servers. +If there is an error during the execution, a notification containing a (hopefully) detailed error description is provided in this view. + + +## Connect new Data Sources +With StreamPipes Connect it is possible to connect new data sources in StreamPipes with just a few clicks. +Therefore, we provide a marketplace with a lot of different adapters that can be configured and executed to create new __Data Streams__ in the __Pipeline Editor__. +With StreamPipes Connect it is possible to connect specific data sources - e.g. an adapter streaming the current location of the ISS (International Space Station). +It is also possible to connect generic sources like message brokers or databases. +If the event schema of the data source is unknown, the system tries to infer the schema by extracting some sample data and analysing it. 
+
+
+
+In addition to connecting new sources, data can be cleaned, transformed, and enriched with meta-information.
+To do so, the event schema can be changed or enriched in step 3 (Define Event Schema).
+
+
+
+## Manage Pipelines
+The _Manage Pipelines_ view provides an overview of all existing pipelines.
+Existing pipelines can be managed using this view.
+For instance, users can start and stop pipelines or delete them when they are no longer needed.
+Pipeline actions can be performed by clicking one of the buttons next to each pipeline.
+For each pipeline, the title and description are displayed in the table.
+By clicking the edit symbol, an overview of the created pipeline is shown. In this window, you are able to analyze statistics, identify errors, or edit the pipeline.
+
+
+
+In a setting with many defined pipelines, it can become difficult to keep track of all pipelines.
+This is why we introduced categories.
+A category is a set of pipelines that can be defined by users in order to better organize pipelines.
+By clicking on the "Manage Categories" button on the top left, a new category can be added to StreamPipes.
+In this example, we create a new category named "New Demo Category".
+After saving a category, pipelines can be added to the newly created category.
+The new category is then presented as a tab in the _Manage Pipelines_ view.
+This tab contains all previously defined pipelines.
+
+
+
+## Live Dashboard
+The live dashboard can be used for multiple use cases.
+It is a good way to monitor a running system live, but it can also be used during pipeline development to get fast feedback from newly created pipelines.
+Below is a screenshot of an example dashboard showing the current pressure value, a line chart, and a traffic light for a water level pipeline.
+All pipelines that contain the "Dashboard Sink" can be visualized in this view.
+To add a new visualisation, click on the "Add visualisation" button in the top left corner.
+Afterwards, a three-step configuration menu is shown.
+The first step is to select the pipeline that should be visualized.
+In the second step, the type of visualization can be defined.
+Currently, 10 different types are available, including line charts, various map visualizations, and data tables.
+After selecting the type (in our example "Gauge"), you can select the specific measurement values of the data stream that should be displayed.
+In the example below, the water level value should be monitored and the gauge value should range from 0 to 100.
+Once all steps are completed, the new visualization is placed on the dashboard and live data is presented as soon as it becomes available.
+
+
+
+## File Download
+With the file download, it is possible to download stored files directly from Elasticsearch.
+This can be very useful, for example, when a data dump is needed for a specific time range.
+All data that is written into Elasticsearch using the _Elasticsearch Sink_ can be accessed by the file downloader.
+A common use case is to download data for offline analysis and to train a machine learning algorithm.
+First, an index must be defined; afterwards, the time range must be set.
+A date picker helps users to enter the time range.
+When a user clicks the "Create File" button, the file is created.
+All files stored on the server can be downloaded via the download button.
+If the files are no longer needed, they can be deleted by clicking the delete button.
+This will remove the file from the server.
+Since the data is stored in Elasticsearch anyway, it is recommended not to store the files for a longer period of time on the server.
+When a file is needed again at a later point in time, it is easy to recreate it.
+This way, a lot of disk space on the server can be saved, especially when the files are rather large.
+
+
+
+## Notifications
+Notifications are a good solution to notify someone when an urgent situation occurs.
+A notification can be created by using the notification sink in the pipeline editor.
+When using such a sink, a configuration dialogue is presented to the user.
+In this dialogue, the user must provide enough information to resolve the situation when it occurs.
+The message can be configured in the message editor.
+It is also possible to embed values of the event that triggered the notification.
+This can be done with the #property# notation.
+All available properties are presented in the notification configurator.
+When the notification is triggered, the #property# template is replaced with the actual value of the property.
+For example, a message template such as `Current water level: #waterLevel#` (assuming the incoming event provides a property named `waterLevel`) would be rendered as `Current water level: 80` when the notification fires.
+
+
+
+An icon on the notification tab in the menu shows the user how many unread notifications are currently in the system.
+This icon also alerts users when new notifications arrive.
+In the notification overview, all notifications are listed.
+At the top are the new notifications that have not been read yet.
+A user can mark them as read by clicking on the little envelope icon.
+Those notifications are then no longer in the unread section, but they remain in the "All Messages" view.
+This way it is possible to keep track of all notifications and have a look at them at a later point in time.
+
+
+
+## Install Pipeline Elements
+StreamPipes is highly configurable and extensible.
+Pipeline elements can be added and removed at runtime.
+This can be done in the "Install Pipeline Elements" view.
+All available pipeline elements are shown here and can be selected for installation or uninstallation.
+It is also possible to select multiple or all of them and then install them all together.
+When a new element is installed by the user, it is automatically available in the "Pipeline Editor" and can be used in pipelines.
+Elements that are uninstalled are removed from the system.
+They can no longer be used within pipelines.
+
+
+
+## My Elements
+The "My Elements" view gives a quick overview of all installed pipeline elements.
+Here they can be inspected and the description can be re-imported.
+In this view, it is also possible to have a look at the JSON-LD representation of each element.
+This is not important for a user, but it is worth noting that the system uses this machine-understandable format to support the user.
+For example, all information about the sources, such as the data schema or unit information, is contained in the JSON-LD metadata.
+
+
+
+## Configuration
+The last feature is the _configuration view_.
+Here, it is possible to change the configuration parameters of installed components.
+All components containing processing elements automatically register in StreamPipes when they are started in the same network as the backend component.
+Once a container is registered, it is represented in the configuration view.
+The green dot on the top left indicates that the container is running properly.
+When there is a problem with a container, the dot changes to red to alert the user.
+To configure the parameters, the top right arrow of a configuration box must be clicked.
+Then the configuration menu pops up.
+There it is possible to change the parameters of a service.
+To make the changes persistent, the "Update" button must be clicked.
+A user should keep in mind that sometimes it is necessary to restart a container when the parameters are changed.
+Sometimes it is also necessary to re-import the pipeline element description, either by uninstalling and re-installing the elements after the container restart or by reloading the description in the "My elements" view.
+
+
+
+Congratulations! You've just finished your first tour of StreamPipes.
+Although there's still more to learn, we introduced most of the currently available features.
+On the next page, the different processing elements that come with the installation are explained.
diff --git a/website-v2/versioned_sidebars/version-0.95.1-sidebars.json b/website-v2/versioned_sidebars/version-0.95.1-sidebars.json
new file mode 100644
index 000000000..f45b5a41f
--- /dev/null
+++ b/website-v2/versioned_sidebars/version-0.95.1-sidebars.json
@@ -0,0 +1,204 @@
+{
+  "documentation": {
+    "🚀 Try StreamPipes": [
+      "user-guide-introduction",
+      "try-installation",
+      "user-guide-for-quickstart"
+    ],
+    "💡 Concepts": [
+      "introduction",
+      "concepts-overview"
+    ],
+    "🎓 Use StreamPipes": [
+      "use-connect",
+      "use-pipeline-editor",
+      "use-managing-pipelines",
+      "use-dashboard",
+      "use-data-explorer",
+      "use-notifications",
+      "use-install-pipeline-elements",
+      "use-configurations"
+    ],
+    "📚 Pipeline Elements": [
+      {
+        "type": "category",
+        "label": "Adapters",
+        "items": [
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.kafka",
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.pulsar",
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.rocketmq",
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.tubemq",
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.file",
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.httpserver",
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.http",
+          "pe/org.apache.streampipes.connect.adapters.iss",
+          "pe/org.apache.streampipes.connect.adapters.image.stream",
+          "pe/org.apache.streampipes.connect.iiot.adapters.influxdb.stream",
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.mqtt",
+          "pe/org.apache.streampipes.connect.iiot.adapters.simulator.machine",
+          "pe/org.apache.streampipes.connect.iiot.protocol.stream.nats",
+          "pe/org.apache.streampipes.connect.iiot.adapters.netio.mqtt",
+          "pe/org.apache.streampipes.connect.iiot.adapters.netio.rest",
+          "pe/org.apache.streampipes.connect.iiot.adapters.oi4",
+          "pe/org.apache.streampipes.connect.iiot.adapters.opcua",
+          "pe/org.apache.streampipes.connect.iiot.adapters.plc4x.modbus",
+          "pe/org.apache.streampipes.connect.iiot.adapters.plc4x.s7",
+          "pe/org.apache.streampipes.connect.iiot.adapters.ros",
+          "pe/org.apache.streampipes.connect.iiot.adapters.iolink"
+        ]
+      },
+      {
+        "type": "category",
+        "label": "Data Processors",
+        "items": [
+          "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter",
+          "pe/org.apache.streampipes.processors.filters.jvm.processor.booleanfilter",
+          "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter",
+          "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.logical",
+          "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer",
+          "pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state",
+          "pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata",
"pe/org.apache.streampipes.processors.transformation.jvm.duration-value", + "pe/org.apache.streampipes.processors.textmining.jvm.chunker", + "pe/org.apache.streampipes.processors.filters.jvm.compose", + "pe/org.apache.streampipes.processors.transformation.jvm.count-array", + "pe/org.apache.streampipes.processors.siddhi.count", + "pe/org.apache.streampipes.processors.transformation.jvm.datetime", + "pe/org.apache.streampipes.processors.transformation.jvm.fieldhasher", + "pe/org.apache.streampipes.processors.transformation.jvm.field-mapper", + "pe/org.apache.streampipes.processors.transformation.jvm.fieldrename", + "pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification", + "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.buffergeometry", + "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.bufferpoint", + "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.reprojection", + "pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.revgeocoder.geocityname", + "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latlngtojtspoint", + "pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversine", + "pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.distancecalculator.haversinestatic", + "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.epsg", + "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.complex", + "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.validation.simple", + "pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemaps", + "pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.geocoder.googlemapsstatic", + "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory", + "pe/org.apache.streampipes.processors.geo.jvm.latlong.processor.speedcalculator", + "pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper", + "pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher", + "pe/org.apache.streampipes.processors.enricher.jvm.jseval", + "pe/org.apache.streampipes.processors.siddhi.listcollector", + "pe/org.apache.streampipes.processors.siddhi.listfilter", + "pe/org.apache.streampipes.processors.enricher.jvm.processor.math.mathop", + "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping", + "pe/org.apache.streampipes.processors.transformation.jvm.measurementunitconverter", + "pe/org.apache.streampipes.processors.filters.jvm.enrich", + "pe/org.apache.streampipes.processors.filters.jvm.schema", + "pe/org.apache.streampipes.processors.filters.jvm.movingaverage", + "pe/org.apache.streampipes.processors.textmining.jvm.namefinder", + "pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number", + "pe/org.apache.streampipes.processors.transformation.jvm.round", + "pe/org.apache.streampipes.processors.filters.jvm.numericalfilter", + "pe/org.apache.streampipes.processors.siddhi.numericalfilter", + "pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter", + "pe/org.apache.streampipes.processors.textmining.jvm.partofspeech", + "pe/org.apache.streampipes.processors.filters.jvm.project", + "pe/org.apache.streampipes.processor.imageclassification.qrcode", + "pe/org.apache.streampipes.processors.filters.jvm.limit", + "pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection", + "pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge", + 
"pe/org.apache.streampipes.processors.transformation.jvm.split-array", + "pe/org.apache.streampipes.processors.enricher.jvm.processor.math.staticmathop", + "pe/org.apache.streampipes.processors.transformation.jvm.processor.staticmetadata", + "pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter", + "pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer", + "pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state", + "pe/org.apache.streampipes.processors.filters.jvm.sdt", + "pe/org.apache.streampipes.processors.filters.jvm.merge", + "pe/org.apache.streampipes.processors.transformation.jvm.taskduration", + "pe/org.apache.streampipes.processors.filters.jvm.textfilter", + "pe/org.apache.streampipes.processors.filters.jvm.threshold", + "pe/org.apache.streampipes.processors.filters.jvm.throughputmon", + "pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor", + "pe/org.apache.streampipes.processors.textmining.jvm.tokenizer", + "pe/org.apache.streampipes.processors.siddhi.topk", + "pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean", + "pe/org.apache.streampipes.processors.siddhi.increase", + "pe/org.apache.streampipes.processors.enricher.jvm.processor.trigonometry", + "pe/org.apache.streampipes.processors.enricher.jvm.valuechange", + "pe/org.apache.streampipes.processors.transformation.jvm.changed-value", + "pe/org.apache.streampipes.processors.changedetection.jvm.welford" + ] + }, + { + "type": "category", + "label": "Data Sinks", + "items": [ + "pe/org.apache.streampipes.sinks.databases.jvm.iotdb", + "pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest", + "pe/org.apache.streampipes.sinks.databases.jvm.couchdb", + "pe/org.apache.streampipes.sinks.internal.jvm.datalake", + "pe/org.apache.streampipes.sinks.databases.ditto", + "pe/org.apache.streampipes.sinks.notifications.jvm.email", + "pe/org.apache.streampipes.sinks.databases.jvm.influxdb", + "pe/org.apache.streampipes.sinks.brokers.jvm.jms", + "pe/org.apache.streampipes.sinks.brokers.jvm.kafka", + "pe/org.apache.streampipes.sinks.brokers.jvm.mqtt", + "pe/org.apache.streampipes.sinks.notifications.jvm.msteams", + "pe/org.apache.streampipes.sinks.brokers.jvm.nats", + "pe/org.apache.streampipes.sinks.internal.jvm.notification", + "pe/org.apache.streampipes.sinks.databases.jvm.opcua", + "pe/org.apache.streampipes.sinks.notifications.jvm.onesignal", + "pe/org.apache.streampipes.sinks.databases.jvm.postgresql", + "pe/org.apache.streampipes.sinks.brokers.jvm.pulsar", + "pe/org.apache.streampipes.sinks.brokers.jvm.rest", + "pe/org.apache.streampipes.connectors.ros.sink", + "pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq", + "pe/org.apache.streampipes.sinks.databases.jvm.redis", + "pe/org.apache.streampipes.sinks.brokers.jvm.rocketmq", + "pe/org.apache.streampipes.sinks.notifications.jvm.slack", + "pe/org.apache.streampipes.sinks.notifications.jvm.telegram", + "pe/org.apache.streampipes.sinks.brokers.jvm.tubemq", + "pe/org.apache.streampipes.sinks.brokers.jvm.websocket" + ] + } + ], + "⚡ Deploy StreamPipes": [ + "choosing-the-right-flavor", + "deploy-docker", + "deploy-kubernetes", + "deploy-use-ssl", + "deploy-security", + "deploy-environment-variables" + ], + "💻 Customize StreamPipes": [ + "extend-setup", + "extend-cli", + "extend-archetypes", + "extend-first-processor", + "extend-tutorial-adapters", + "extend-tutorial-data-processors", + "extend-tutorial-data-sinks", + "extend-client", + 
"extend-sdk-functions", + "extend-sdk-event-model", + "extend-sdk-migration", + "extend-sdk-stream-requirements", + "extend-sdk-static-properties", + "extend-sdk-output-strategies", + "extend-customize-ui" + ], + "🔧 Technicals": [ + "technicals-architecture", + "technicals-runtime-wrappers", + "technicals-messaging" + ], + "👪 Community": [ + "community-get-help", + "community-contribute" + ] + }, + "faq": { + "FAQ": [ + "faq-common-problems" + ] + } +} diff --git a/website-v2/versions.json b/website-v2/versions.json index 2e26bd1c4..e704e2796 100644 --- a/website-v2/versions.json +++ b/website-v2/versions.json @@ -1,4 +1,5 @@ [ + "0.95.1", "0.95.0", "0.93.0", "0.92.0", From 6fa021986e671268d793767d03931c797e1e687b Mon Sep 17 00:00:00 2001 From: Dominik Riemer Date: Mon, 8 Jul 2024 19:06:31 +0200 Subject: [PATCH 3/3] Remove release docs 0.70.0 --- .../version-0.70.0/01_try-installation.md | 71 ---- .../version-0.70.0/01_try-overview.md | 122 ------ .../version-0.70.0/01_try-tutorial.md | 21 - .../version-0.70.0/02_concepts-adapter.md | 8 - .../02_concepts-data-streams.md | 8 - .../version-0.70.0/02_concepts-glossary.md | 8 - .../version-0.70.0/02_concepts-overview.md | 36 -- .../version-0.70.0/02_concepts-pipeline.md | 8 - .../version-0.70.0/03_use-configurations.md | 49 --- .../version-0.70.0/03_use-connect.md | 73 ---- .../version-0.70.0/03_use-dashboard.md | 67 ---- .../version-0.70.0/03_use-data-explorer.md | 103 ----- .../03_use-install-pipeline-elements.md | 10 - .../03_use-managing-pipelines.md | 53 --- .../version-0.70.0/03_use-notifications.md | 26 -- .../version-0.70.0/03_use-pipeline-editor.md | 63 --- .../version-0.70.0/05_deploy-docker.md | 75 ---- .../version-0.70.0/05_deploy-kubernetes.md | 61 --- .../version-0.70.0/05_deploy-security.md | 76 ---- .../version-0.70.0/05_deploy-use-ssl.md | 36 -- .../version-0.70.0/06_extend-archetypes.md | 65 ---- .../version-0.70.0/06_extend-cli.md | 191 --------- .../06_extend-first-processor.md | 58 --- .../06_extend-sdk-event-model.md | 142 ------- .../version-0.70.0/06_extend-sdk-functions.md | 126 ------ .../06_extend-sdk-migration-sd.md | 117 ------ .../06_extend-sdk-output-strategies.md | 349 ----------------- .../06_extend-sdk-static-properties.md | 267 ------------- .../06_extend-sdk-stream-requirements.md | 179 --------- .../version-0.70.0/06_extend-setup.md | 51 --- .../06_extend-tutorial-data-processors.md | 363 ------------------ .../06_extend-tutorial-data-sinks.md | 231 ----------- .../06_extend-tutorial-data-sources.md | 214 ----------- .../07_technicals-architecture.md | 63 --- .../07_technicals-configuration.md | 59 --- .../version-0.70.0/07_technicals-messaging.md | 8 - .../07_technicals-runtime-wrappers.md | 8 - .../07_technicals-user-guidance.md | 8 - .../version-0.70.0/08_debugging.md | 8 - .../version-0.70.0/08_monitoring.md | 8 - .../version-0.70.0/09_contribute.md | 18 - .../version-0.70.0/09_get-help.md | 26 -- .../version-0.70.0/dev-guide-archetype.md | 7 - .../version-0.70.0/dev-guide-processor-sdk.md | 12 - .../version-0.70.0/dev-guide-sink-sdk.md | 12 - .../version-0.70.0/faq-common-problems.md | 74 ---- ...e.streampipes.connect.adapters.coindesk.md | 45 --- ....streampipes.connect.adapters.flic.mqtt.md | 60 --- ...ache.streampipes.connect.adapters.gdelt.md | 43 --- ...e.streampipes.connect.adapters.iex.news.md | 49 --- ...streampipes.connect.adapters.iex.stocks.md | 48 --- ....streampipes.connect.adapters.image.set.md | 39 -- ...reampipes.connect.adapters.image.stream.md | 39 -- 
...reampipes.connect.adapters.influxdb.set.md | 42 -- ...mpipes.connect.adapters.influxdb.stream.md | 42 -- ...apache.streampipes.connect.adapters.iss.md | 40 -- ....streampipes.connect.adapters.mysql.set.md | 40 -- ...reampipes.connect.adapters.mysql.stream.md | 40 -- ...streampipes.connect.adapters.netio.mqtt.md | 65 ---- ...streampipes.connect.adapters.netio.rest.md | 65 ---- ...ect.adapters.nswaustralia.trafficcamera.md | 40 -- ...ache.streampipes.connect.adapters.opcua.md | 77 ---- ...reampipes.connect.adapters.plc4x.modbus.md | 40 -- ...e.streampipes.connect.adapters.plc4x.s7.md | 40 -- ...apache.streampipes.connect.adapters.ros.md | 65 ---- ...ipes.connect.adapters.simulator.machine.md | 41 -- ...onnect.adapters.simulator.randomdataset.md | 40 -- ...ect.adapters.simulator.randomdatastream.md | 40 -- ...ache.streampipes.connect.adapters.slack.md | 40 -- ....apache.streampipes.connect.adapters.ti.md | 58 --- ...ampipes.connect.adapters.wikipedia.edit.md | 40 -- ...eampipes.connect.adapters.wikipedia.new.md | 40 -- ...treampipes.connect.protocol.stream.file.md | 39 -- ...treampipes.connect.protocol.stream.http.md | 39 -- ...ipes.connect.protocol.stream.httpserver.md | 39 -- ...reampipes.connect.protocol.stream.kafka.md | 39 -- ...treampipes.connect.protocol.stream.mqtt.md | 54 --- ...eampipes.connect.protocol.stream.pulsar.md | 39 -- ....apache.streampipes.processor.geo.flink.md | 52 --- ...streampipes.processor.geo.jvm.geocoding.md | 61 --- ...ipes.processor.geo.jvm.reversegeocoding.md | 65 ---- ...pipes.processor.geo.jvm.staticgeocoding.md | 62 --- ...cation.jvm.generic-image-classification.md | 53 --- ...r.imageclassification.jvm.image-cropper.md | 44 --- ....imageclassification.jvm.image-enricher.md | 44 --- ...es.processor.imageclassification.qrcode.md | 69 ---- ...rocessors.aggregation.flink.aggregation.md | 62 --- ...ipes.processors.aggregation.flink.count.md | 67 ---- ...processors.aggregation.flink.eventcount.md | 57 --- ...pipes.processors.aggregation.flink.rate.md | 55 --- ...es.processors.changedetection.jvm.cusum.md | 66 ---- ...rs.enricher.flink.processor.math.mathop.md | 57 --- ...icher.flink.processor.math.staticmathop.md | 57 --- ...s.enricher.flink.processor.trigonometry.md | 57 --- ...richer.flink.processor.urldereferencing.md | 52 --- ...pes.processors.enricher.flink.timestamp.md | 49 --- ...pes.processors.enricher.jvm.sizemeasure.md | 50 --- ...eampipes.processors.filters.jvm.compose.md | 51 --- ...reampipes.processors.filters.jvm.enrich.md | 48 --- ...treampipes.processors.filters.jvm.limit.md | 71 ---- ...treampipes.processors.filters.jvm.merge.md | 58 --- ....processors.filters.jvm.numericalfilter.md | 57 --- ...cessors.filters.jvm.numericaltextfilter.md | 68 ---- ...eampipes.processors.filters.jvm.project.md | 49 --- ...pipes.processors.filters.jvm.textfilter.md | 54 --- ...mpipes.processors.filters.jvm.threshold.md | 57 --- ...ssors.geo.jvm.jts.processor.latLngToGeo.md | 74 ---- ...rocessors.geo.jvm.jts.processor.setEPSG.md | 60 --- ...essors.geo.jvm.jts.processor.trajectory.md | 84 ---- ...rs.geo.jvm.processor.distancecalculator.md | 61 --- ...ipes.processors.geo.jvm.processor.speed.md | 56 --- ....jvm.processor.staticdistancecalculator.md | 71 ---- ...cessors.pattern-detection.flink.absence.md | 54 --- ....processors.pattern-detection.flink.and.md | 54 --- ....pattern-detection.flink.peak-detection.md | 54 --- ...essors.pattern-detection.flink.sequence.md | 54 --- ...streampipes.processors.siddhi.frequency.md | 56 --- 
...pipes.processors.siddhi.frequencychange.md | 59 --- ....streampipes.processors.siddhi.increase.md | 66 ---- ...pipes.processors.siddhi.numericalfilter.md | 64 --- ....streampipes.processors.siddhi.sequence.md | 36 -- ...ache.streampipes.processors.siddhi.stop.md | 57 --- ...ors.statistics.flink.statistics-summary.md | 44 --- ...sors.textmining.flink.languagedetection.md | 54 --- ...s.processors.textmining.flink.wordcount.md | 54 --- ...pipes.processors.textmining.jvm.chunker.md | 70 ---- ...essors.textmining.jvm.languagedetection.md | 170 -------- ...es.processors.textmining.jvm.namefinder.md | 66 ---- ....processors.textmining.jvm.partofspeech.md | 63 --- ...essors.textmining.jvm.sentencedetection.md | 60 --- ...pes.processors.textmining.jvm.tokenizer.md | 60 --- ...rs.transformation.flink.field-converter.md | 55 --- ...ssors.transformation.flink.field-mapper.md | 70 ---- ...sors.transformation.flink.field-renamer.md | 69 ---- ...essors.transformation.flink.fieldhasher.md | 55 --- ...mation.flink.measurement-unit-converter.md | 54 --- ...nsformation.flink.processor.boilerplate.md | 50 --- ...transformation.jvm.booloperator.counter.md | 65 ---- ...ransformation.jvm.booloperator.inverter.md | 52 --- ...sformation.jvm.booloperator.timekeeping.md | 71 ---- ...s.transformation.jvm.booloperator.timer.md | 57 --- ...essors.transformation.jvm.changed-value.md | 46 --- ...ocessors.transformation.jvm.count-array.md | 56 --- ...ocessors.transformation.jvm.csvmetadata.md | 77 ---- ...ssors.transformation.jvm.duration-value.md | 52 --- ...rmation.jvm.processor.booloperator.edge.md | 58 --- ...mation.jvm.processor.booloperator.state.md | 59 --- ...ansformation.jvm.processor.state.buffer.md | 55 --- ...tion.jvm.processor.state.labeler.buffer.md | 70 ---- ...tion.jvm.processor.state.labeler.number.md | 59 --- ...tion.jvm.processor.stringoperator.state.md | 52 --- ...mation.jvm.processor.timestampextractor.md | 59 --- ...ocessors.transformation.jvm.split-array.md | 60 --- ...ansformation.jvm.stringoperator.counter.md | 62 --- ...transformation.jvm.stringoperator.timer.md | 64 --- ...cessors.transformation.jvm.taskduration.md | 51 --- ...transformation.jvm.transform-to-boolean.md | 51 --- ...rg.apache.streampipes.protocol.set.file.md | 39 -- ...rg.apache.streampipes.protocol.set.http.md | 39 -- ...treampipes.sinks.brokers.jvm.bufferrest.md | 59 --- ...pache.streampipes.sinks.brokers.jvm.jms.md | 61 --- ...che.streampipes.sinks.brokers.jvm.kafka.md | 62 --- ...ache.streampipes.sinks.brokers.jvm.mqtt.md | 62 --- ...ache.streampipes.sinks.brokers.jvm.nats.md | 79 ---- ...he.streampipes.sinks.brokers.jvm.pulsar.md | 64 --- ....streampipes.sinks.brokers.jvm.rabbitmq.md | 74 ---- ...ache.streampipes.sinks.brokers.jvm.rest.md | 53 --- ...pache.streampipes.sinks.databases.ditto.md | 74 ---- ...pes.sinks.databases.flink.elasticsearch.md | 61 --- ...streampipes.sinks.databases.jvm.couchdb.md | 64 --- ...treampipes.sinks.databases.jvm.influxdb.md | 86 ----- ...e.streampipes.sinks.databases.jvm.iotdb.md | 71 ---- ...e.streampipes.sinks.databases.jvm.mysql.md | 75 ---- ...e.streampipes.sinks.databases.jvm.opcua.md | 73 ---- ...eampipes.sinks.databases.jvm.postgresql.md | 74 ---- ...treampipes.sinks.internal.jvm.dashboard.md | 53 --- ...streampipes.sinks.internal.jvm.datalake.md | 86 ----- ...ampipes.sinks.internal.jvm.notification.md | 58 --- ...reampipes.sinks.notifications.jvm.email.md | 68 ---- ...pipes.sinks.notifications.jvm.onesignal.md | 64 --- ...reampipes.sinks.notifications.jvm.slack.md | 68 ---- 
...mpipes.sinks.notifications.jvm.telegram.md | 71 ---- .../version-0.70.0-sidebars.json | 213 ---------- website-v2/versions.json | 3 +- 184 files changed, 1 insertion(+), 11680 deletions(-) delete mode 100644 website-v2/versioned_docs/version-0.70.0/01_try-installation.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/01_try-overview.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/01_try-tutorial.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/02_concepts-adapter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/02_concepts-data-streams.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/02_concepts-glossary.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/02_concepts-overview.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/02_concepts-pipeline.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/03_use-configurations.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/03_use-connect.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/03_use-dashboard.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/03_use-data-explorer.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/03_use-install-pipeline-elements.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/03_use-managing-pipelines.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/03_use-notifications.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/03_use-pipeline-editor.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/05_deploy-docker.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/05_deploy-kubernetes.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/05_deploy-security.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/05_deploy-use-ssl.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-archetypes.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-cli.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-first-processor.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-sdk-event-model.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-sdk-functions.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-sdk-migration-sd.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-sdk-output-strategies.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-sdk-static-properties.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-sdk-stream-requirements.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-setup.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-processors.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-sinks.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-sources.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/07_technicals-architecture.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/07_technicals-configuration.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/07_technicals-messaging.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/07_technicals-runtime-wrappers.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/07_technicals-user-guidance.md delete mode 100644 
website-v2/versioned_docs/version-0.70.0/08_debugging.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/08_monitoring.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/09_contribute.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/09_get-help.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/dev-guide-archetype.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/dev-guide-processor-sdk.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/dev-guide-sink-sdk.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/faq-common-problems.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.coindesk.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.flic.mqtt.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.gdelt.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iex.news.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iex.stocks.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.image.set.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.image.stream.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.influxdb.set.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.influxdb.stream.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iss.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.mysql.set.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.mysql.stream.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.netio.mqtt.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.netio.rest.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.opcua.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.plc4x.modbus.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.plc4x.s7.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.ros.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.machine.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.randomdataset.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.randomdatastream.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.slack.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.ti.md delete mode 100644 
website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.wikipedia.edit.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.wikipedia.new.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.file.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.http.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.httpserver.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.kafka.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.mqtt.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.pulsar.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.flink.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.geocoding.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.reversegeocoding.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.staticgeocoding.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.qrcode.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.aggregation.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.count.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.eventcount.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.rate.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.changedetection.jvm.cusum.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.math.mathop.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.trigonometry.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.urldereferencing.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.timestamp.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.jvm.sizemeasure.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.compose.md delete mode 100644 
website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.enrich.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.limit.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.merge.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.numericalfilter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.project.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.textfilter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.threshold.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.distancecalculator.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.speed.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.absence.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.and.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.peak-detection.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.sequence.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.frequency.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.frequencychange.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.increase.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.numericalfilter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.sequence.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.stop.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.statistics.flink.statistics-summary.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.flink.languagedetection.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.flink.wordcount.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.chunker.md delete mode 100644 
website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.languagedetection.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.namefinder.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.partofspeech.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.tokenizer.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-converter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-mapper.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-renamer.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.fieldhasher.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.measurement-unit-converter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.processor.boilerplate.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.changed-value.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.count-array.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.duration-value.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.buffer.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.buffer.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state.md delete mode 100644 
website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.split-array.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.taskduration.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.protocol.set.file.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.protocol.set.http.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.jms.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.kafka.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.mqtt.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.nats.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.pulsar.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.rest.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.ditto.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.flink.elasticsearch.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.couchdb.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.influxdb.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.iotdb.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.mysql.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.opcua.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.postgresql.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.dashboard.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.datalake.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.notification.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.email.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.onesignal.md delete mode 100644 
website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.slack.md delete mode 100644 website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.telegram.md delete mode 100644 website-v2/versioned_sidebars/version-0.70.0-sidebars.json diff --git a/website-v2/versioned_docs/version-0.70.0/01_try-installation.md b/website-v2/versioned_docs/version-0.70.0/01_try-installation.md deleted file mode 100644 index e179eb2d5..000000000 --- a/website-v2/versioned_docs/version-0.70.0/01_try-installation.md +++ /dev/null @@ -1,71 +0,0 @@ ---- -id: try-installation -title: Installation -sidebar_label: Installation -original_id: try-installation ---- - -import DownloadSection from '@site/src/components/download/DownloadSection.tsx'; - -The easiest way to install StreamPipes is our Docker-based installation. For production-grade deployments, we also -recommend looking at our Kubernetes support, which is also part of the installation kit. - -## Prerequisites - -The Docker-based installation requires **Docker** and **Docker Compose** to be installed on the target machine. -Installation instructions can be found below. - -
-
Install Docker
-

Go to https://docs.docker.com/installation/ and follow the instructions to install Docker for your OS. Make sure docker can be started as a non-root user (described in the installation manual, don’t forget to log out and in again) and check that Docker is installed correctly by executing docker-run hello-world

-
- -
-
Configure Docker
-

By default, Docker uses only a limited number of CPU cores and memory. - If you run StreamPipes on Windows or on a Mac you need to adjust the default settings. - To do that, click on the Docker icon in your tab bar and open the preferences. - Go to the advanced preferences and set the **number of CPUs to 6** (recommended) and the **Memory to 4GB**. - After changing the settings, Docker needs to be restarted.

- -### Supported operating systems - -The Docker-based installation supports the operating systems **Linux**, **Mac OS X** and **Windows 10**. Older windows -versions are not fully compatible with Docker. Linux VMs running under Windows might cause network problems with Docker, -therefore some manual work might be needed to make StreamPipes run properly. - -### Web Browser - -The StreamPipes application itself will be accessible through a web browser. We recommend a recent version of Chrome ( -best experience), Firefox or Edge. - -## Install StreamPipes - - - -## Setup StreamPipes - -Once you've opened the browser at the URL given above, you should see the StreamPipes application as shown below. At -initial startup, StreamPipes automatically performs an installation process. -After the installation has finished, continue by clicking on "Go to login -page", once all components are successfully configured. - -On the login page, enter your credentials, then you should be forwarded to the home page. - -Congratulations! You've successfully managed to install StreamPipes. Now we're ready to build our first pipeline! - - - -
-
Errors during the installation process
-

In most cases, errors during the installation are due to an under-powered system.
-If there is a problem with any of the components, please restart the whole system (docker-compose down and eventually also delete the volumes). - Please also make sure that your system meets the hardware requirements as mentioned in the first section of the installation guide.

-
- -## Next Steps - -That's it! To ease your first steps with StreamPipes, we've created an [interactive tutorial](try-tutorial). diff --git a/website-v2/versioned_docs/version-0.70.0/01_try-overview.md b/website-v2/versioned_docs/version-0.70.0/01_try-overview.md deleted file mode 100644 index 48be14900..000000000 --- a/website-v2/versioned_docs/version-0.70.0/01_try-overview.md +++ /dev/null @@ -1,122 +0,0 @@ ---- -id: user-guide-introduction -title: Apache StreamPipes Documentation -sidebar_label: Overview -original_id: user-guide-introduction ---- - -This is the documentation of Apache StreamPipes. - -StreamPipes Overview - -
-
-
-
- 🚀 Try -
-
-
Your first steps with Apache StreamPipes:
- Install StreamPipes 🔗 -
-
-
-
-
-
- 💡 Concepts -
-
-
Learn about some general concepts of StreamPipes:
- Overview 🔗 -
-
-
-
-
-
- 🎓 Use -
- -
-
-
-
-
- 📚 Pipeline Elements -
-
-
Available pipeline elements in StreamPipes:
- Adapters 🔗, - Data Processors 🔗, - Data Sinks 🔗 -
-
-
-
-
-
- ⚡ Deploy -
-
-
How to set up StreamPipes in test and production environments:
- Docker 🔗, - Kubernetes 🔗, - Use SSL 🔗 -
-
-
- -
-
-
- 🔧 Technicals -
-
-
Learn about technical concepts behind the curtain:
- Architecture 🔗, - User Guidance 🔗 , - Runtime Wrappers 🔗, - Messaging 🔗, - Configuration 🔗 -
-
-
-
-
-
- 👪 Community -
-
-
Get support and learn how to contribute to StreamPipes:
- Get Help 🔗, - Contribute 🔗 -
-
-
-
diff --git a/website-v2/versioned_docs/version-0.70.0/01_try-tutorial.md b/website-v2/versioned_docs/version-0.70.0/01_try-tutorial.md deleted file mode 100644 index fb7f86174..000000000 --- a/website-v2/versioned_docs/version-0.70.0/01_try-tutorial.md +++ /dev/null @@ -1,21 +0,0 @@ ---- -id: try-tutorial -title: Interactive Tutorial -sidebar_label: Interactive Tutorial -original_id: try-tutorial ---- - -Once you've installed StreamPipes and see the home screen, you'll see a number of modules that are part of the StreamPipes toolbox. -As a first step, you might be interested in taking the interactive tutorial that helps you create your first pipeline. -Switch to the **Pipeline Editor** and you will see a dialog that asks you for the start of the interactive tutorial: - -Tutorial Welcome Page - -Click **Start Tour** to start the tour. In this tour, you'll build a simple pipeline that monitors (simulated) live data from a water tank system. -Within the tour, perform the actions as recommended and click **Next** to trigger the next steps. Some tour steps won't require to select **Next**, but wait for you to take the recommended action. -You can cancel the tour anytime by clicking the **Exit Tour** button. - -Tutorial Welcome Page - -Now after you've built your first pipeline, you might be interested in reading about some of our core [concepts](concepts-overview) - diff --git a/website-v2/versioned_docs/version-0.70.0/02_concepts-adapter.md b/website-v2/versioned_docs/version-0.70.0/02_concepts-adapter.md deleted file mode 100644 index 584f5a543..000000000 --- a/website-v2/versioned_docs/version-0.70.0/02_concepts-adapter.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: concepts-adapter -title: Data Adapters -sidebar_label: Data Adapters -original_id: concepts-adapter ---- - -tbd diff --git a/website-v2/versioned_docs/version-0.70.0/02_concepts-data-streams.md b/website-v2/versioned_docs/version-0.70.0/02_concepts-data-streams.md deleted file mode 100644 index a8f25015d..000000000 --- a/website-v2/versioned_docs/version-0.70.0/02_concepts-data-streams.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: concepts-data-streams -title: Data Streams -sidebar_label: Data Streams -original_id: concepts-data-streams ---- - -tbd \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.70.0/02_concepts-glossary.md b/website-v2/versioned_docs/version-0.70.0/02_concepts-glossary.md deleted file mode 100644 index 68a33967c..000000000 --- a/website-v2/versioned_docs/version-0.70.0/02_concepts-glossary.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: concepts-glossary -title: Glossary -sidebar_label: Glossary -original_id: concepts-glossary ---- - -tbd \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.70.0/02_concepts-overview.md b/website-v2/versioned_docs/version-0.70.0/02_concepts-overview.md deleted file mode 100644 index 4bb85e80c..000000000 --- a/website-v2/versioned_docs/version-0.70.0/02_concepts-overview.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -id: concepts-overview -title: StreamPipes Concepts -sidebar_label: Overview -original_id: concepts-overview ---- - -To understand how StreamPipes works, the knowledge of a few core concepts, illustrated below, will be helpful. - -Overview of concepts - -## Adapter -An adapter connects to any external data source and forwards received events to the internal StreamPipes system. Within StreamPipes, the output of adapters are available in form of the two primary building blocks **Data Set** and **Data Stream**. 
-Adapters can be created either by using StreamPipes Connect, a module to easily connect to new data sources directly from the user interface, or by defining an adapter using the provided Software Development Kit (SDK). - -## Data Set / Data Stream -**Data Streams** and **Data Sets** represent the primary source for working with events in StreamPipes. -A stream is an ordered sequence of events, where an event typically consists of one or more observation values and additional metadata. The "structure" (or schema) of an event provided by a data stream or set is stored in the internal semantic schema registry of StreamPipes. -While data streams are typically unbounded, data sets have a fixed end and are internally "replayed" by the system from beginning to end once they are used as part of a pipeline. -In the following, we mostly refer to data streams, but most concepts also apply to data sets. - -## Data Processor -**Data Processors** in StreamPipes transform one or more input data streams into an output data stream. -Such transformations can be rather simple, e.g., filtering based on a predefined rule, or more complex, e.g., applying rule-based or learning-based algorithms to the data. -Data Processors can be applied to any data stream that matches the input requirements of a processor. In addition, most processors can be configured by providing user-defined parameters directly in the user interface. -Data processors define stream requirements, i.e., a set of minimum properties an incoming event stream must provide. Data processors can keep state or perform stateless operations. -At runtime, data streams are processed by using one of the underlying runtime wrappers (see the developer guide for more details). - -## Data Sink -**Data Sinks** consume event streams similar to Data Processors, but do not provide an output data stream. As such, data sinks typically perform some action or trigger a visualization as a result of a stream transformation. -Similar to data processors, sinks also define input requirements that any bound data stream must fulfill, and can be customized. -StreamPipes provides several internal data sinks, e.g., to create notifications, visualize live data or persist historical data of incoming streams. In addition, various data sinks are provided to forward data streams to external systems such as databases. - -## Pipeline -A pipeline in Apache StreamPipes describes the transformation process from a data stream to a data sink. Typically, a pipeline consists of at least one data stream (or data set), zero or more data processors and at least one data sink. -Pipelines are built by users in a graphical way using the **Pipeline Editor** and can be started and stopped at any time.
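-To make the data processor concept more concrete, the following is a minimal, illustrative sketch of an event handler following the StreamPipes Java SDK (the selector string and the `temperature` field are hypothetical examples; see the developer guide for the full programming model):
-
-```java
-public void onEvent(Event event, SpOutputCollector collector) {
-  // read a field by its property selector (stream s0, runtime name "temperature")
-  float temperature = event.getFieldBySelector("s0::temperature")
-      .getAsPrimitive()
-      .getAsFloat();
-
-  // simple rule-based transformation: only forward events above a threshold
-  if (temperature > 30.0f) {
-    collector.collect(event);
-  }
-}
-```
-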
diff --git a/website-v2/versioned_docs/version-0.70.0/02_concepts-pipeline.md b/website-v2/versioned_docs/version-0.70.0/02_concepts-pipeline.md deleted file mode 100644 index 282642fc4..000000000 --- a/website-v2/versioned_docs/version-0.70.0/02_concepts-pipeline.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: concepts-pipelines -title: Pipelines -sidebar_label: Pipelines -original_id: concepts-pipelines ---- - -tbd \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.70.0/03_use-configurations.md b/website-v2/versioned_docs/version-0.70.0/03_use-configurations.md deleted file mode 100644 index fda2f1169..000000000 --- a/website-v2/versioned_docs/version-0.70.0/03_use-configurations.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -id: use-configurations -title: Configurations -sidebar_label: Configurations -original_id: use-configurations ---- - -The configuration section is an admin-only interface for system-wide settings. - -## General configuration - -General configuration - -The general configuration serves to provide basic system settings. The basic settings allow you to configure the app name (which is used, e.g., for mails sent by StreamPipes). -Additionally, the externally available host and port can be set, which are used by the mail system to add links to emails. - -Furthermore, self-registration and password recovery features can be activated in this view. Note that both features require a working email configuration. - -## Datalake - -Datalake configuration - -Here, stored data lake databases can be truncated or deleted. The view also gives information on the number of data points currently stored in a measurement series. - -## Email configuration - -Email configuration - -In this section, the email configuration is set. The email configuration is used to send mails to users. Most standard mail server settings are supported. The configuration can be validated by triggering a test mail that is sent to a given recipient. - -## Messaging - -Messaging configuration - -The messaging configuration is used to control parameters for the communication between pipeline elements. Individual Kafka settings can be configured, as well as the priority of selected message formats and protocols during pipeline creation. - -## Pipeline Element Configuration - -Pipeline element configuration - -Individual configurations of extensions services are available in this view. The available configurations depend on the provided configuration variables in the service definition of each extensions service. - -## Security - -Security configuration - -The security configuration allows you to manage existing user accounts, service accounts and groups. New users can be added and roles can be assigned. - -Please also read more about security [here](05_deploy-security.md). diff --git a/website-v2/versioned_docs/version-0.70.0/03_use-connect.md b/website-v2/versioned_docs/version-0.70.0/03_use-connect.md deleted file mode 100644 index 34495cb6e..000000000 --- a/website-v2/versioned_docs/version-0.70.0/03_use-connect.md +++ /dev/null @@ -1,73 +0,0 @@ ---- -id: use-connect -title: StreamPipes Connect -sidebar_label: StreamPipes Connect -original_id: use-connect ---- - -StreamPipes Connect is the module to connect external data sources with Apache StreamPipes directly from the user interface. -StreamPipes Connect offers various adapters for common communication protocols and some specific sensors.
Besides connecting data, StreamPipes Connect offers ways to pre-process data without the need to build pipelines and integrates a schema guesser that listens for incoming data and recommends the recognized event schema. - -The screenshot below illustrates the data marketplace, which is shown after navigating to "StreamPipes Connect" and then clicking the "New adapter" button at the top. - -StreamPipes Connect Overview - -## Connecting new data sources - -### Data Marketplace -The data marketplace shows a list of all adapters that are currently installed in Apache StreamPipes. Each adapter offers various configuration options which depend on the specifics of the adapter. -Adapters are distinguished a) by the data source concept they provide (data set or data stream) and b) by the adapter type, where we distinguish between _generic adapters_, which usually implement a generic communication protocol such as MQTT or Apache Kafka, and _specific adapters_, which implement the interface of a specific sensor (e.g., for Netio power sockets). -Several filter options are available to find a suitable adapter. The configuration of a new adapter starts with selecting one of the available adapters, which starts an assistant that supports the adapter generation. - -### Protocol/Basic Settings -In the first step, basic configurations need to be provided. For instance, for an Apache PLC4X adapter, the IP address of the PLC needs to be provided. In this example, we provide basic settings for connecting to an Apache Kafka broker. After all values are provided, the "Next" button opens the next step. - -StreamPipes Connect Basic Settings - -### Format Specification -The next step, format specification, is only available for generic adapters which support different message formats to be sent over the corresponding protocol. Think of a message broker that is able to consume messages in either JSON or binary format. -Currently supported formats include XML, various JSON representations, images and CSV. After a format has been selected, further format configurations can be provided (depending on the selected format) to further customize the incoming message format. - -StreamPipes Connect Format Selection - -### Schema Editor -In the next step, based on the previously provided protocol and format settings, the system will either provide the fixed/pre-defined schema of the adapter or, in case of generic adapters, will connect to the underlying system and try to listen for incoming data. After a few seconds, the schema editor will appear, providing a list of detected fields from the incoming events (the schema). - -StreamPipes Connect Schema Editor - -In the toolbar, several configuration options are available which transform the original schema: - -* **Add Nested Property**. This option allows you to modify the structure of the event by creating a nested structure. The schema can be simply changed by dragging and dropping fields into the nested structure. -* **Add Static Value**. This option allows you to add a field containing a static value (e.g., an identifier) to the event. -* **Add Timestamp**. This option appends the current timestamp to each incoming event, useful in case the timestamp is not provided by the origin. -* **Refresh**. Re-triggers the schema guessing. -* **Delete field**. Select one or more fields by clicking the checkbox on the right and trigger the delete button. -* **Property scope**. For each field, a property scope can be defined which is either _Measurement_, _Dimension_ or _Header_.
These values are later used in the pipeline editor to assist in configuring pipeline elements and do not have any functional consequence. -Use _Measurement_ to indicate that the field measures a value (e.g., a temperature value from a sensor), use _Dimension_ for any identifier (e.g., the sensor ID) and use _Header_ for any other metadata such as timestamps. - -For each field (also called event property) of the schema, additional configuration options are available by clicking the _Edit_ button: - -* **Label**. Used to provide a human-readable label for the field, which will ease the identification of fields when building pipelines. -* **Runtime Name.** This is the identifier of the field in the underlying message representation format (e.g., the JSON key). Renaming the runtime name will trigger a so-called _transformation rule_ which renames the incoming field name to the new field name before forwarding it to StreamPipes (an illustrative example is shown at the end of this page). -* **Domain Property/Semantic Type**. To help StreamPipes better understand the value which is represented by the field, semantic type information can be given. As of StreamPipes 0.68.0, the semantic type can be selected from a wide range of available options. Additionally, a URL can be manually provided that indicates the meaning of the value (e.g., http://schema.org/Temperature). -* **Mark as Timestamp**. Indicates that the selected value represents a timestamp. When selected, a _timestamp converter_ can be configured which will convert incoming timestamps to the UNIX timestamp. -* **Runtime Type**. Here, the data type can be changed. -* **Unit**. Allows you to specify the unit in which the value is measured. Once selected, you can also automatically convert the unit to a target unit, which will then be inserted into the data stream produced by the adapter (see screenshot below). - -StreamPipes Connect Unit Conversion - -Assigning a timestamp is mandatory and can be done either by adding a timestamp from the menu or by choosing an existing field and marking it as a timestamp. - -### Adapter Generation -Finally, the adapter is ready to be started. In the _Adapter Generation_ page, a name and description for the resulting data stream must be provided. -Once started, StreamPipes creates your new adapter and displays a preview of the connected data, which refreshes about once per second. -Afterwards, the newly created data stream is available in the pipeline editor for further usage. - -StreamPipes Connect Adapter Generation - -## Managing adapters - -Currently running adapters are available in the "Running adapters" section of StreamPipes Connect. Existing adapters can be stopped and deleted. Currently, there is no mechanism to edit an existing adapter or to stop the adapter without deleting it. - -### Adapter Templates -For frequently used configurations, adapter templates can be created. An adapter template is a pre-configured adapter which can be further customized by users. Created adapter templates are available in the marketplace similar to standard adapters.
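-### Example: Rename transformation rule
-
-As an illustration of the rename transformation rule mentioned above, consider a hypothetical source that publishes events such as the following (field names are invented for this example):
-
-```json
-{"ts": 1622204211000, "temp": 33.2}
-```
-
-Renaming the runtime names `ts` and `temp` to `timestamp` and `temperature` in the schema editor would cause the adapter to forward events of the following form to StreamPipes:
-
-```json
-{"timestamp": 1622204211000, "temperature": 33.2}
-```
-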
diff --git a/website-v2/versioned_docs/version-0.70.0/03_use-dashboard.md b/website-v2/versioned_docs/version-0.70.0/03_use-dashboard.md deleted file mode 100644 index 4fc75c851..000000000 --- a/website-v2/versioned_docs/version-0.70.0/03_use-dashboard.md +++ /dev/null @@ -1,67 +0,0 @@ ---- -id: use-dashboard -title: Live Dashboard -sidebar_label: Live Dashboard -original_id: use-dashboard ---- - -The live dashboard can be used to visualize live data of data streams using a set of visualizations. -The entry page of the live dashboard lists all created dashboards as in the screenshot below: - -StreamPipes Dashboard Overview - -## Visualizing Data Streams - -To visualize data streams in the live dashboard, a pipeline must be created that makes use of the so-called **Dashboard** sink. -Any data stream or data processor can serve as an input of the dashboard sink. Switch to the pipeline editor, create a pipeline and configure the dashboard sink. The visualization name is used to identify the sink in case multiple dashboard sinks are used within a single pipeline. - -## Managing Dashboards -Multiple dashboards can be created, e.g., to organize different assets in a single dashboard view. - -A new dashboard can be created by clicking the _New Dashboard_ button, which opens a dialog that requires basic dashboard settings such as the title and description of the new dashboard. -Once created, the dashboard will be shown in the overview. Here, the following dashboard actions are available: - -* **Show** opens the dashboard. -* **Window** opens the dashboard in a new window with reduced controls, e.g., without the StreamPipes navigation and toolbar. This is a useful view for standalone displays that should visualize key parameters. -* **Settings** allows you to modify the basic dashboard settings. -* **Edit** opens the dashboard in edit mode, where widgets can be added to the dashboard. -* **Delete** deletes the selected dashboard. - -## Creating Visualizations - -Visualizations can be added to each dashboard in the form of widgets. To add new visualizations, switch to the dashboard in _Edit_ mode. -In edit mode, a button appears that allows you to add a new visualization. - -Adding a new visualization is supported by a wizard consisting of three steps: - -StreamPipes Dashboard Pipeline Selection - -* **Select pipeline** is the first step where a pipeline is selected on which the visualization is based. In this view, all pipelines are listed that have at least one **Dashboard Sink**. In case a pipeline contains multiple dashboard sinks, the visualization name is listed below the pipeline name, which eases discovery of the proper visualization. -* **Select widget** is the next step where the visualization widget must be selected. StreamPipes automatically filters this list based on input requirements of widgets. For instance, image visualizations are only visible if the input data stream provides an image object. -* **Configure widget** provides widget-specific settings to configure the visualization. In most cases, colors and titles of widgets can be modified. Additionally, chart-specific settings such as axis value ranges can be configured. - -StreamPipes Dashboard Widget Configuration - -By clicking _Create_, the new widget is placed on the canvas. Size and positioning of visualizations can be flexibly changed based on the provided grid. To change the widget configuration, the _Settings_ button of each widget can be clicked to re-open the configuration dialog.
- -Once created, the dashboard provides a live view of all visualizations: - -StreamPipes Live Dashboard - - -Before the dashboard is closed, make sure to click the _Save_ button to persist the updated dashboard. Changes can be discarded by clicking the _Discard_ button. - - -## Available widgets - -The following visualizations are available in the latest release: - -* Area Chart -* Gauge -* HTML page (renders HTML markup) -* Image -* Line Chart -* Raw (displays the raw JSON input for debugging purposes) -* Single Value (displays a single measurement) -* Table -* Traffic Light diff --git a/website-v2/versioned_docs/version-0.70.0/03_use-data-explorer.md b/website-v2/versioned_docs/version-0.70.0/03_use-data-explorer.md deleted file mode 100644 index af31c65ab..000000000 --- a/website-v2/versioned_docs/version-0.70.0/03_use-data-explorer.md +++ /dev/null @@ -1,103 +0,0 @@ ---- -id: use-data-explorer -title: Data Explorer -sidebar_label: Data Explorer -original_id: use-data-explorer ---- - -The data explorer can be used to visualize and explore data streams that are persisted by using the **Data Lake** sink. - -StreamPipes Data Explorer Overview - -It provides a canvas (i.e. a data view) where various visualizations from multiple pipelines can be placed. For each data view, you can set a date and time range for the configured visualizations. - -## Using the data explorer - -### Get the data - -Any pipeline that uses the so-called **Data Lake** sink can be explored in the data explorer. Switch to the pipeline editor and add the data lake sink to a data processor or stream. -The sink requires an index name as a configuration parameter, which is used as an identifier in the data explorer. - -### Data Views & Widgets - -After your data is stored in the data lake, you can switch over to the data-explorer tab to create a new data view and the widgets of your choice. In StreamPipes, a data view organizes a set of related widgets (i.e. data visualizations or plots) and gets assigned a single date and time range. The default date and time range covers the last 15 minutes from the current date and time. You can select predefined ranges (e.g. day or month) or configure the exact date and time range you want to explore. - -StreamPipes Data Explorer Component - -First create and name your data view and select the edit icon to proceed. In your data view, you can now add a new widget configuration (plus icon) to configure and create your first widget. The widget configuration consists of (i) data, where the individual data sources in the data lake are selected, the properties for the widget are chosen and filters on the data sources are defined and applied, (ii) visualization, where the type of widget is chosen and the respective configuration for the widget type is performed, and (iii) appearance, where general style configurations for the widget (such as background color) can be performed. - -### Data Configuration - -The data configuration is the first step to define your widget. You can add several data sources (i.e. data sinks) and need to configure each added data source individually. This gives you sufficient freedom to combine the needed information, potentially consisting of different data resolutions, filters or types of information. - -StreamPipes Data Explorer Data Configuration - -After selecting the initial data source, you can choose if the underlying data query is to be performed raw, aggregated or single.
Raw queries refer to using the data as-is, where you can define a limit on the number of events to guarantee performant usage in the application. In aggregated mode, you can choose among predefined aggregation granularities (e.g. day, minute, second). - -In the next step, you can choose the fields (i.e. properties of your data source) you are interested in exploring. If you selected aggregation or single mode, you can also modify the type of aggregation to be performed on the selected property. - -You can also filter your data source by adding conjunctive conditions. - -### Visualization Configuration - -The visualization configuration depends on the visualization type, which needs to be selected first. The data explorer currently supports the following types: - -#### Table - -The table view formats the selected properties in table format. - -StreamPipes Data Explorer Table - -#### Map - -The map allows you to visualize and explore coordinates on the world map. The configuration requires choosing the property that contains the coordinates, and allows you to choose the marker style, a zoom level as well as the tooltip content. - -StreamPipes Data Explorer Map - -#### Heatmap - -The heatmap widget visualizes data in terms of the available intensity, where higher values are interpreted as being more intense. You only need to select the property which you want to visualize. Note that it might be interesting to aggregate the data in the data configuration to get more insights into your heatmap. - -StreamPipes Data Explorer Heatmap - -#### Time Series - -The time series widget allows you to explore and analyze your numerical and boolean data properties. You can easily visualize your data properties in various styles (i.e. scatter, line, scattered line, bar or symbol) and colors, and configure a second y-axis for better interpretation of varying property ranges. - -StreamPipes Data Explorer Time Series 1 - -StreamPipes Data Explorer Time Series 2 - -StreamPipes Data Explorer Time Series 3 - -#### Image - -The image widget enables you to integrate and visualize your image data. - -#### Indicator - -The indicator widget lets you visualize a single numerical value as well as (optionally) the delta to another indicator. You only need to configure the respective properties. - -StreamPipes Data Explorer Indicator - -#### 2D Correlation - -The correlation plot currently supports analyzing the relationship of two properties. Once selected, you can choose between a scatter view of the plotted data points or directly extract correlations in a density chart. - -StreamPipes Data Explorer Correlation 1 - -StreamPipes Data Explorer Correlation 2 - -#### Distribution - -In the distribution widget, you can quickly get an overview of your data range and common data values. You can either choose a histogram view, where a bar chart is used to show the frequency of automatically extracted data ranges, or a pie view, where you can also select the granularity of how your data is clustered in terms of frequency. - -StreamPipes Data Explorer Distribution 1 - -StreamPipes Data Explorer Distribution 2 - -### Appearance Configuration - -Finally, you can change the title of your created widget as well as background and text colors in the appearance configuration.
- -StreamPipes Data Explorer Appearance diff --git a/website-v2/versioned_docs/version-0.70.0/03_use-install-pipeline-elements.md b/website-v2/versioned_docs/version-0.70.0/03_use-install-pipeline-elements.md deleted file mode 100644 index 10be7e572..000000000 --- a/website-v2/versioned_docs/version-0.70.0/03_use-install-pipeline-elements.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -id: use-install-pipeline-elements -title: Install Pipeline Elements -sidebar_label: Install Pipeline Elements -original_id: use-install-pipeline-elements ---- - -## Install Pipeline Elements - -(coming soon) diff --git a/website-v2/versioned_docs/version-0.70.0/03_use-managing-pipelines.md b/website-v2/versioned_docs/version-0.70.0/03_use-managing-pipelines.md deleted file mode 100644 index 1aba73b1c..000000000 --- a/website-v2/versioned_docs/version-0.70.0/03_use-managing-pipelines.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -id: use-managing-pipelines -title: Managing Pipelines -sidebar_label: Managing Pipelines -original_id: use-managing-pipelines ---- - -The pipeline view lists all created pipelines and provides several views and actions to manage the lifecycle of pipelines. - -In the entry screen, an overview of all created pipelines is shown: - -StreamPipes Pipeline Overview - -## Pipeline Actions -Within the pipeline overview, for each pipeline several actions are available: -* **Start/Stop pipeline** starts or stops the selected pipeline. Once clicked, StreamPipes will trigger the selected action for all pipeline elements and open a success or error dialog as illustrated below. -* **Show details** opens the pipeline detail view (see below). -* **Modify pipeline** opens the pipeline in the pipeline editor, where the pipeline can be modified. Note that this button is only visible if the pipeline is not running. -* **Delete pipeline** opens a confirmation dialog, which subsequently deletes the selected pipeline. - -The screenshot below shows the status of a pipeline after it has been successfully started. By clicking the _Show details_ button, more information on the status of each corresponding pipeline element microservice becomes available. In case of failures, the failure reason will be shown for each pipeline element that has failed to start. - -StreamPipes Pipeline Start Dialog - -## Organizing Pipelines into Categories -Pipelines can be organized into categories, which is a useful feature in case a larger number of pipelines is created. -All categories will be shown as separate tabs in the pipeline overview. The same pipeline can be assigned to multiple categories. - -To add a new category or to add a new pipeline to an existing category, click the _Manage Categories_ button and configure the category and assigned pipelines in the dialog. - -## Pipeline Details -The pipeline details view can be opened by clicking the _Show details_ button in the pipeline overview panel. - -StreamPipes Pipeline Details - -### Overview -The overview section displays the graphical structure of the pipeline and provides some statistics about recent pipeline actions. Additionally, pipelines can be directly started, stopped, modified and deleted within this view. - -### Monitoring -Monitoring features will become available in version 0.68.0. - -### Errors -Monitoring of failures and logs will become available in version 0.69.0.
- -### QuickEdit -The quick edit feature (only available for pipelines that are not running) is a quick and convenient way to modify some pipeline element configurations without opening the pipeline in the pipeline editor. -To use the quick edit feature, switch to the _QuickEdit_ tab, which will display the selected pipeline. - -By clicking a pipeline element from the preview canvas, available configuration options of the selected pipeline element can be modified. Note that only modifications that do not affect the pipeline structure (e.g., different output streams) can be made. - -StreamPipes Pipeline Quick Edit - -After a configuration value has been changed, make sure to click the _Update Pipeline_ button to save the changes. diff --git a/website-v2/versioned_docs/version-0.70.0/03_use-notifications.md b/website-v2/versioned_docs/version-0.70.0/03_use-notifications.md deleted file mode 100644 index 627efca69..000000000 --- a/website-v2/versioned_docs/version-0.70.0/03_use-notifications.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -id: use-notifications -title: Notifications -sidebar_label: Notifications -original_id: use-notifications ---- - -The notification module can be used to create internal notifications. - -StreamPipes Notifications - -## Using notifications - -Any pipeline that includes the data sink **Notification** can trigger notifications that appear in the notification view. To configure a new notification, switch to the pipeline editor and append the notification sink to a data processor or data stream. -The sink requires a title and message as configuration parameters. - -### Placeholders - -The notification message can include placeholders for fields, which are replaced with the actual value at runtime. - -## Managing notifications - -The notification view is split into two parts. The left side lists all pipelines which include a notification sink. By selecting a pipeline, available notifications will be shown in the right panel. -By scrolling up, older notifications become visible. Notifications that have appeared in the detail view will be automatically marked as read, so that only new, unread notifications will appear in the left toolbar. - - diff --git a/website-v2/versioned_docs/version-0.70.0/03_use-pipeline-editor.md b/website-v2/versioned_docs/version-0.70.0/03_use-pipeline-editor.md deleted file mode 100644 index 9762e7819..000000000 --- a/website-v2/versioned_docs/version-0.70.0/03_use-pipeline-editor.md +++ /dev/null @@ -1,63 +0,0 @@ ---- -id: use-pipeline-editor -title: Pipeline Editor -sidebar_label: Pipeline Editor -original_id: use-pipeline-editor ---- - -The pipeline editor module supports building pipelines that transform a data stream using a set of reusable data processors and data sinks. -The empty pipeline editor looks similar to the illustration below after a new installation. - -StreamPipes Pipeline Editor Overview - -## Pipeline Elements -The four main concepts data sets, data streams, data processors and data sinks are available at the top of the pipeline editor. By switching the tabs, the individual pipeline elements for each category can be found. -By clicking the question mark symbol, which appears when hovering over an element, additional information can be viewed (e.g., a live preview of incoming data for data streams, and the pipeline element documentation for data processors and sinks).
- -StreamPipes Pipeline Element Info - -## Creating Pipelines -Pipelines are built by dragging data streams, processors and sinks into the pipeline assembly area. Typically, a pipeline is built step-by-step starting with a data source (stream or set). -Afterwards, data processors and sinks are added to the pipeline. Connections between pipeline elements are made by selecting the gray connector of the source and moving it to the target pipeline element. -Once a connection is made, StreamPipes performs a quick validation step and, in case two pipeline elements are compatible, automatically opens a configuration window. - -### Configuring Pipeline Elements -The configuration depends on the selected pipeline element and looks similar to the screenshot below. -In general, pipeline elements are configured by providing the required values. Once the pipeline element is fully configured, the _Save_ button activates and can be used to save the configuration for the pipeline element. - -StreamPipes Pipeline Element Configuration - -In addition, the following options are available in the pipeline element configuration menu: -* **Show documentation** extends the view and displays the pipeline element's documentation next to the configuration view. -* **Show only recommended settings** filters the list of available fields provided by the connected input data stream based on the _property scope_, e.g., so that only measurement values are displayed and dimension fields from the input stream are not available for selection. If deactivated, selections contain the full list of available fields that match the input requirement of the data processor. - -### Pipeline Element Options -Further options for a pipeline element can be displayed by hovering over a pipeline element in the assembly area, so that additional buttons appear around the pipeline element: - -* **Configure element** re-opens the configuration view to update the pipeline element configuration (only available for data processors and sinks) -* **Delete element** removes the pipeline element from the pipeline -* **Help** opens the pipeline element's documentation -* **Compatible element** opens a dialog which shows all pipeline elements that are compatible with the current element's output data stream. The dialog offers an alternative to selecting pipeline elements directly from the pipeline element selection at the top. -* **Pipeline Element Recommendation** opens a dialog which shows all recommended pipeline elements that are compatible with the current element's output data stream. The recommendation is based on previously connected pipeline elements and is displayed below. - -### Pipeline Editor Options -Several pipeline editor options are available in the menu bar of the pipeline assembly: - -StreamPipes Pipeline Editor Options - -* **Save pipeline** opens the save dialog (see below) -* **Pan** allows panning within the assembly area, useful for larger pipelines that do not fit on the screen -* **Select** is visible if pan mode is active and switches back to the default select mode -* **Zoom in/out** triggers the zoom in the pipeline assembly -* **Auto Layout** lays out the pipeline in a much more beautiful way than you are able to do by yourself ;-) -* **All pipeline modifications saved** is displayed if the current pipeline has been cached. Cache updates are triggered after every change of the pipeline so that changes are not lost after reloading the window. -* **Hints** are shown to display current errors (e.g., incomplete pipelines).
Details can be opened by clicking the hint button. -* **Clear assembly** clears the assembly and removes the current pipeline. - -### Saving a pipeline -To save a pipeline, press the _save pipeline_ button. A dialog pops up where a name and description of the pipeline can be entered (only the name is mandatory). -Additionally, a pipeline can be directly started after it has been stored by checking the corresponding button. - -StreamPipes Save Pipeline Dialog - - diff --git a/website-v2/versioned_docs/version-0.70.0/05_deploy-docker.md b/website-v2/versioned_docs/version-0.70.0/05_deploy-docker.md deleted file mode 100644 index e7926c372..000000000 --- a/website-v2/versioned_docs/version-0.70.0/05_deploy-docker.md +++ /dev/null @@ -1,75 +0,0 @@ ---- -id: deploy-docker -title: Docker Deployment -sidebar_label: Docker Deployment -original_id: deploy-docker ---- - -StreamPipes Compose is a simple collection of user-friendly `docker-compose` files that lets you easily gain first-hand experience with Apache StreamPipes. - -> **NOTE**: We recommend using StreamPipes Compose only for an initial try-out and testing. If you are a developer and -> want to develop new pipeline elements or core features, use the [StreamPipes CLI](06_extend-cli.md). - -#### TL;DR: A one-liner to rule them all :-) - -```bash -docker-compose up -d -``` -Go to http://localhost to finish the installation in the browser. Once finished, switch to the pipeline editor and start the interactive tour or check the [online tour](https://streampipes.apache.org/docs/docs/user-guide-tour/) to learn how to create your first pipeline! - -## Prerequisites -* Docker >= 17.06.0 -* Docker-Compose >= 1.17.0 (Compose file format: 3.4) -* Google Chrome (recommended), Mozilla Firefox, Microsoft Edge - -Tested on: **macOS, Linux, Windows 10** (CMD, PowerShell, GitBash) - -**macOS** and **Windows 10** (Pro, Enterprise, Education) users can easily get Docker and Docker-Compose on their systems by installing **Docker for Mac/Windows** (recommended). - -> **NOTE**: On purpose, we disabled all port mappings except HTTP port **80**, which is used to access the StreamPipes UI, to provide a minimal surface for port conflicts. - -## Usage -We provide two options to get you going: - -- **default**: a light-weight option with few pipeline elements, needs less memory -- **full**: contains more pipeline elements, requires **>16 GB RAM** (recommended) - -**Starting** the **default** option is as easy as simply running: -> **NOTE**: Starting might take a while since `docker-compose up` also initially pulls all Docker images from Docker Hub. - -```bash -docker-compose up -d -# after all services are started, go to http://localhost -``` -After all containers are successfully started, just go to your browser and visit http://localhost to finish the installation. Once finished, switch to the pipeline editor and start the interactive tour or check the [online tour](https://streampipes.apache.org/docs/docs/user-guide-tour/) to learn how to create your first pipeline!
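-To verify that all services have come up before opening the browser, you can check the container status (standard Docker Compose commands; the service names depend on the chosen compose file):
-
-```bash
-docker-compose ps
-# or follow the logs of all services:
-# docker-compose logs -f
-```
-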
- -**Stopping** the **default** option is similarly easy: -```bash -docker-compose down -# if you want to remove mapped data volumes, run: -# docker-compose down -v -``` - -Starting the **full** option is almost the same, just specify the `docker-compose.full.yml` file: -```bash -docker-compose -f docker-compose.full.yml up -d -# after all services are started, go to http://localhost -``` -Stopping the **full** option: -```bash -docker-compose -f docker-compose.full.yml down -#docker-compose -f docker-compose.full.yml down -v -``` - -## Update services -To actively pull the latest available Docker images use: -```bash -docker-compose pull -# docker-compose -f docker-compose.full.yml pull -``` - -## Upgrade -To upgrade to another StreamPipes version, simply edit the `SP_VERSION` in the `.env` file. -``` -SP_VERSION= -``` diff --git a/website-v2/versioned_docs/version-0.70.0/05_deploy-kubernetes.md b/website-v2/versioned_docs/version-0.70.0/05_deploy-kubernetes.md deleted file mode 100644 index ea593a5dc..000000000 --- a/website-v2/versioned_docs/version-0.70.0/05_deploy-kubernetes.md +++ /dev/null @@ -1,61 +0,0 @@ ---- -id: deploy-kubernetes -title: Kubernetes Deployment -sidebar_label: Kubernetes Deployment -original_id: deploy-kubernetes ---- - -## Prerequisites -Requires Helm (https://helm.sh/) and an active connection to a Kubernetes cluster (a running Tiller server is only required for Helm v2). - -Tested with: -* K3s v1.18.8+k3s1 (6b595318) with K8s v1.18.8 -* Helm v3.1.2 - -## Usage -We provide two helm chart options to get you going: - -- **default**: a light-weight option with few pipeline elements, needs less memory -- **full**: contains more pipeline elements, requires **>16 GB RAM** (recommended) - -**Starting** the **default** helm chart option is as easy as simply running the following command from the root of this folder: -> **NOTE**: Starting might take a while since we also initially pull all Docker images from Docker Hub. - -```bash -helm install streampipes ./ -``` -After a while, all containers should have started successfully, indicated by the `Running` status. -```bash -kubectl get pods -NAME READY STATUS RESTARTS AGE -activemq-66d58f47cf-2r2nb 1/1 Running 0 3m27s -backend-76ddc486c8-nswpc 1/1 Running 0 3m27s -connect-master-7b477f9b79-8dfvr 1/1 Running 0 3m26s -connect-worker-78d89c989c-9v8zs 1/1 Running 0 3m27s -consul-55965f966b-gwb7l 1/1 Running 0 3m27s -couchdb-77db98cf7b-xnnvb 1/1 Running 0 3m27s -influxdb-b95b6479-r8wh8 1/1 Running 0 3m27s -kafka-657b5fb77-dp2d6 1/1 Running 0 3m27s -pipeline-elements-all-jvm-79c445dbd9-m8xcs 1/1 Running 0 3m27s -sources-watertank-simulator-6c6b8844f6-6b4d7 1/1 Running 0 3m27s -ui-b94bd9766-rm6zb 2/2 Running 0 3m27s -zookeeper-5d9947686f-6nzgs 1/1 Running 0 3m26s -``` - -After all containers are successfully started, just go to your browser and visit any of the k8s cluster nodes on -`http://` to finish the installation. - -> **NOTE**: If you're running Docker for Mac or Docker for Windows with a local k8s cluster, the above step to use your host IP might not work. Luckily, you can port-forward a service port to your localhost using the following command to be able to access the UI either via `http://localhost` or `http://` (you require sudo to run this command in order to bind to a privileged port).
```bash -kubectl port-forward svc/ui --address=0.0.0.0 80:80 -``` - -Starting the **full** helm chart option is almost the same: -```bash -helm install streampipes ./ --set deployment=full -``` - -**Deleting** the current helm chart deployment: -```bash -helm del streampipes -``` diff --git a/website-v2/versioned_docs/version-0.70.0/05_deploy-security.md b/website-v2/versioned_docs/version-0.70.0/05_deploy-security.md deleted file mode 100644 index b9958c436..000000000 --- a/website-v2/versioned_docs/version-0.70.0/05_deploy-security.md +++ /dev/null @@ -1,76 +0,0 @@ ---- -id: deploy-security -title: Security -sidebar_label: Security -original_id: deploy-security ---- - -## Overriding default settings - -At installation time, StreamPipes checks for available environment variables relevant for securing the system. If they are not set, it will use the default values. - -The following variables are checked by the core at installation time: - -* SP_INITIAL_ADMIN_EMAIL The email address of the initial administrator. -* SP_INITIAL_ADMIN_PASSWORD The password of the initial administrator. -* SP_INITIAL_CLIENT_USER The initial client user, used by the extensions modules to make authenticated API requests to the core. -* SP_INITIAL_CLIENT_SECRET The default password of the initial client user. -* SP_SETUP_INSTALL_PIPELINE_ELEMENTS Indicates whether pipeline elements should be installed. -* SP_ENCRYPTION_PASSCODE The encryption passcode, used for securely storing secrets (e.g., database connection strings). -* SP_JWT_SECRET The JWT secret, used for signing JWT tokens. - -In addition, all extensions services that perform requests to the core will need to have the following environment variables set: - -* SP_CLIENT_USER The client user, used by the extensions modules to make authenticated API requests to the core. -* SP_CLIENT_SECRET The password of the client user. - -Note that there are default values for all environment variables that are set at installation time - make sure to change these settings when moving to production! - -## Configuration - -Most security-related settings can be set in the configuration section of StreamPipes. The *General* section allows you to enable self-service registration and password recovery (both are disabled by default and require a valid email configuration). -In the *Security* section, users, service accounts, roles and groups can be configured. - - -## User types - -StreamPipes distinguishes between User Accounts (real users that interact with StreamPipes over the UI or an API) and Service Accounts (user-independent accounts which solely use StreamPipes over the API). - -Service accounts are typically used by extensions services that require API access to the core (e.g., to get a list of running pipelines). - -## Permissions - -StreamPipes v0.69.0 comes with more advanced mechanisms to manage permissions. -For each major resource (pipeline elements, pipelines, StreamPipes Connect adapters, dashboards, data explorer views), permissions can be assigned individually to users and groups. - -To ease permission handling, StreamPipes comes with a number of default roles with pre-assigned privileges: - -### Roles - -* Admin The administrator role has full access to all resources. -* Service Admin The service administrator role has full access to all resources, but has no access to the UI. -* Pipeline Admin has full control of pipelines (create, edit, delete, start, stop, pause, resume, etc.). -* Pipeline User has limited control of pipelines (read only).
-* Dashboard Admin has full control of dashboards (create, edit, delete, etc.). -* Dashboard User has limited control of dashboards (read only). -* Data Explorer Admin has full control of data explorer views (create, edit, delete, etc.). -* Data Explorer User has limited control of data explorer views (read only). -* Connect Admin has full control of StreamPipes Connect adapters (create, edit, delete, etc.). - -### Groups - -Roles can be assigned either to specific users or to groups. Any group can contain several members. -The permissions of a user are the union of the permissions of all roles assigned to the user and the groups to which the user belongs. - -### Changing permissions - -Any resource has a resource owner, which is the authority that created the resource. Resources can be either public or private. Public resources are available to all users, while the user role determines what the user can do with the resource. -E.g., a public pipeline created by a user of role ROLE_ADMIN can be edited by all users with role PIPELINE_ADMIN, while the same pipeline can be read by all users with role PIPELINE_USER. - -Permissions can currently only be changed by admin users. -In the overview section of each resource (e.g., pipelines and dashboards), a permission dialog is available to users with role ROLE_ADMIN. The dialog allows you to assign users and groups to the individual resource. - - - - - diff --git a/website-v2/versioned_docs/version-0.70.0/05_deploy-use-ssl.md b/website-v2/versioned_docs/version-0.70.0/05_deploy-use-ssl.md deleted file mode 100644 index f8c44a68b..000000000 --- a/website-v2/versioned_docs/version-0.70.0/05_deploy-use-ssl.md +++ /dev/null @@ -1,36 +0,0 @@ ---- -id: deploy-use-ssl -title: Use SSL -sidebar_label: Use SSL -original_id: deploy-use-ssl ---- - -This page explains how SSL certificates can be used to provide transport layer security between your browser and the StreamPipes backend. - -## Prerequisites -You need a valid certificate consisting of a private and a public key. Both keys must be in PEM format. Please note that your private key should never be shared; otherwise, the communication cannot be considered secure. - -## Edit docker-compose.yml -In order to use SSL, you have to open port 443 on the nginx service. Incoming insecure traffic on port 80 will automatically be rerouted to port 443. - -The environment variable NGINX_SSL must be set to "true". - -Finally, you have to inject the certificates into the Docker container. In the example below, the certificates are placed in the directory /etc/ssl/private/ on the host machine. Please change the path according to where the certificates are located on your machine. The path after the colon must not be changed! -```yaml -[...] - nginx: - image: apachestreampipes/ui - ports: - - "80:80" - - "443:443" - environment: - - NGINX_SSL=true - volumes: - - /etc/ssl/private/private.pem:/etc/nginx/ssl/ssl.pem - - /etc/ssl/private/public.pem:/etc/nginx/ssl/cert.pem - depends_on: - - backend - networks: - spnet: -[...]
-``` diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-archetypes.md b/website-v2/versioned_docs/version-0.70.0/06_extend-archetypes.md deleted file mode 100644 index 2880d9289..000000000 --- a/website-v2/versioned_docs/version-0.70.0/06_extend-archetypes.md +++ /dev/null @@ -1,65 +0,0 @@ ---- -id: extend-archetypes -title: Maven Archetypes -sidebar_label: Maven Archetypes -original_id: extend-archetypes ---- - -In this tutorial we explain how you can use the Maven archetypes to develop your own StreamPipes processors and sinks. -We use IntelliJ in this tutorial, but it works with any IDE of your choice. - -## Prerequisites -You need to have Maven installed; furthermore, you need an up-and-running StreamPipes installation on your development computer. - -## Create Project -To create a new project, we provide multiple Maven archetypes. -Currently, we provide archetypes for standalone Java-based microservices and archetypes for the experimental Flink wrapper. -The commands required to create a new pipeline element project can be found below. Make sure that you select a version compatible with your StreamPipes installation. -Copy the command into your terminal to create a new project. -The project will be created in the current folder. -First, the ``groupId`` of the resulting Maven artifact must be set. -We use ``groupId``: ``org.example`` and ``artifactId``: ``ExampleProcessor``. -You can keep the default values for the other settings and confirm them by hitting enter (a non-interactive variant is sketched below the archetype list). - -The current {sp.version} is 0.69.0 (for a pre-release version, use the SNAPSHOT appendix, e.g. 0.69.0-SNAPSHOT) - -```bash -mvn archetype:generate \ - -DarchetypeGroupId=org.apache.streampipes \ - -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \ - -DarchetypeVersion={sp.version} -``` -
- Other archetypes - -## Processors Flink -```bash -mvn archetype:generate \ - -DarchetypeGroupId=org.apache.streampipes \ - -DarchetypeArtifactId=streampipes-archetype-pe-processors-flink \ - -DarchetypeVersion={sp.version} -``` - -## Sinks Flink -```bash -mvn archetype:generate \ - -DarchetypeGroupId=org.apache.streampipes \ - -DarchetypeArtifactId=streampipes-archetype-pe-sinks-flink \ - -DarchetypeVersion={sp.version} -``` -
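-If you prefer to skip the interactive prompts, the same values can also be passed directly on the command line (a sketch using standard Maven archetype parameters; the `groupId` and `artifactId` values are the examples from above):
-
-```bash
-mvn archetype:generate \
-  -DarchetypeGroupId=org.apache.streampipes \
-  -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
-  -DarchetypeVersion={sp.version} \
-  -DgroupId=org.example \
-  -DartifactId=ExampleProcessor \
-  -DinteractiveMode=false
-```
-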
- - -## Project structure -Open the project in your IDE. -If everything worked, the structure should look similar to the following image. -The *main* package defines which processors / sinks you want to activate, and the *pe.example* package contains two skeletons for creating a data processor and sink. -For details, have a look at the other parts of the Developer Guide, where these classes are explained in more depth. - -Project Structure - -## Next steps - -Click [here](06_extend-first-processor.md) to learn how to create your first data processor. - - diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-cli.md b/website-v2/versioned_docs/version-0.70.0/06_extend-cli.md deleted file mode 100644 index ee7c7d765..000000000 --- a/website-v2/versioned_docs/version-0.70.0/06_extend-cli.md +++ /dev/null @@ -1,191 +0,0 @@ ---- -id: extend-cli -title: StreamPipes CLI -sidebar_label: StreamPipes CLI -original_id: extend-cli ---- - -The StreamPipes command-line interface (CLI) is aimed at developers and provides an easy entry point to set up a suitable dev environment, whether you plan on developing - -* new extensions such as **connect adapters, processors, sinks** or, -* new core features for **backend** and **ui**. - -The main difference from the standard Docker/K8s installation is improved communication between services running as containers and services running locally for development. - -The CLI can be found in the [main repository](https://github.com/apache/streampipes/tree/master/installer/cli) or in the ``installer/cli`` folder of the downloaded source code. - -## TL;DR - -```bash -streampipes env --list -[INFO] Available StreamPipes environment templates: -pipeline-element -... -streampipes env --set pipeline-element -streampipes up -d -``` -> **NOTE**: use `./streampipes` if you haven't added it to the PATH and sourced it (see section "Run `streampipes` from anywhere?"). - -## Prerequisites -The CLI is basically a wrapper around multiple `docker` and `docker-compose` commands plus some additional sugar. - -* Docker >= 17.06.0 -* Docker-Compose >= 1.26.0 (Compose file format: 3.4) -* Google Chrome (recommended), Mozilla Firefox, Microsoft Edge -* For Windows developers: GitBash only - - -Tested on: **macOS**, **Linux**, **Windows***) - -> **NOTE**: *) If you're using Windows, the CLI only works in combination with GitBash - CMD and PowerShell won't work. - - -## CLI commands overview - -``` -StreamPipes CLI - Manage your StreamPipes environment with ease - -Usage: streampipes COMMAND [OPTIONS] - -Options: - --help, -h show help - --version, -v show version - -Commands: - clean Remove StreamPipes data volumes, dangling images and network - down Stop and remove StreamPipes containers - env Inspect and select StreamPipes environments - info Get information - logs Get container logs for specific container - ps List all StreamPipes container for running environment - pull Download latest images from Dockerhub - restart Restart StreamPipes environment - up Create and start StreamPipes container environment - -Run 'streampipes COMMAND --help' for more info on a command. -``` - -## Usage: Along dev life-cycle - -**List** available environment templates. -```bash -streampipes env --list -``` - -**Inspect** services in an available environment to know what kind of services it is composed of. -```bash -streampipes env --inspect pipeline-element -``` - -**Set** environment, e.g. `pipeline-element`, if you want to write a new pipeline element.
-```bash -streampipes env --set pipeline-element -``` - -**Start** environment (default: `dev` mode). Here the service definition in the selected environment is used to start the multi-container landscape. -> **NOTE**: `dev` mode is enabled by default since we rely on open ports to core services such as `consul`, `couchdb`, `kafka` etc. that need to be reachable from the IDE when developing. If you don't want to map ports (except the UI port), then use the `--no-ports` flag. - -```bash -streampipes up -d -# start in production mode with unmapped ports -# streampipes up -d --no-ports -``` -Now you're good to go to write your new pipeline element :tada: :tada: :tada: - -> **HINT for extensions**: Use our [Maven archetypes](https://streampipes.apache.org/docs/docs/dev-guide-archetype/) to set up a project skeleton and use your IDE of choice for development. However, we do recommend using IntelliJ. - -> **HINT for core**: To work on `backend` or `ui` features you need to set the template to `backend` and clone the core repository [streampipes](https://github.com/apache/streampipes) - check the prerequisites there for more information. - -**Stop** environment and remove Docker containers -```bash -streampipes down -# want to also clean docker data volumes when stopping the environment? -# streampipes down -v -``` - -## Additionally, useful commands - -**Start individual services only?** We got you! You chose a template that suits your needs and now you only want to start individual services from it, e.g. only Kafka and Consul. - -> **NOTE**: the service names need to be present and match your current `.spenv` environment. - -```bash -streampipes up -d kafka consul -``` - -**Get current environment** (if previously set using `streampipes env --set `). -```bash -streampipes env -``` - -**Get logs** of a specific service and use the optional `--follow` flag to stay attached to the logs. -```bash -streampipes logs --follow backend -``` - -**Update** all services of the current environment -```bash -streampipes pull -``` - -**Restart** all services of the current environment or specific services -```bash -streampipes restart -# restart backend & consul -# streampipes restart backend consul -``` - -**Clean** your system and remove created StreamPipes Docker volumes, StreamPipes docker network and dangling StreamPipes images of old image layers. -```bash -streampipes clean -# remove volumes, network and dangling images -# streampipes clean --volumes -``` - -## Modify/Create an environment template -As of now, this step has to be done **manually**. All environments are located in `environments/`. - -```bash -├── adapter # developing a new connect adapter -├── backend # developing core backend features -├── basic # wanna run core, UI, connect etc from the IDE? -├── full # full version containing more pipeline elements -├── lite # few pipeline elements, less memory -├── pipeline-element # developing new pipeline-elements -└── ui # developing UI features -``` -**Modifying an existing environment template**. To modify an existing template, you can simply add a `` to the template. -> **NOTE**: You need to make sure that the service you are adding exists in `deploy/standalone/service/`. If you're adding a completely new service, take a look at existing ones, create a new service directory and include a `docker-compose.yml` and `docker-compose.dev.yml` file. - -``` -[environment:backend] -activemq -kafka -... - -``` - -**Creating a new** environment template.
To create a new environment template, place a new file `environments/` in the template directory. Open the file and use the following schema. -> **IMPORTANT**: Please make sure to have the `[environment:]` header in the first line of your new template, matching the name of the file. Make sure to use lowercase letters only. - -``` -[environment:] - - -... -``` - -## Run `streampipes` from anywhere? No problem -Simply add the path to this CLI directory to your `$PATH` (on macOS, Linux) variable, e.g. in your `.bashrc` or `.zshrc`, or `%PATH%` (on Windows). - -For **macOS**, or **Linux**: - -```bash -export PATH="/path/to/streampipes-installer/installer/cli:$PATH" -``` - -For **Windows 10**, e.g. check this [documentation](https://helpdeskgeek.com/windows-10/add-windows-path-environment-variable/). - - -## Upgrade to new version -To upgrade to a new version, simply edit the version tag `SP_VERSION` in the `.env` file. diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-first-processor.md b/website-v2/versioned_docs/version-0.70.0/06_extend-first-processor.md deleted file mode 100644 index aa00bbb5d..000000000 --- a/website-v2/versioned_docs/version-0.70.0/06_extend-first-processor.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -id: extend-first-processor -title: Your first data processor -sidebar_label: Your first data processor -original_id: extend-first-processor ---- - -In this section, we will explain how to start a pipeline element service and install it using the StreamPipes UI. - -Open the class *ExampleDataProcessor* and edit the ``onEvent`` method to print the incoming event, log it to the console and send it to the next component without changing it. - -```java -@Override -public void onEvent(Event event, SpOutputCollector collector) { - // Print the incoming event on the console - System.out.println(event); - - // Hand the incoming event to the output collector without changing it. - collector.collect(event); -} -``` - -## Start Processor -Starting from StreamPipes 0.69.0, the IP address of an extensions service (processor, adapter or sink) will be auto-discovered upon start. -The auto-discovery is done by the StreamPipes service discovery mechanism and should work for most setups. -Once you start an extensions service, you will see the chosen IP printed in the console. Make sure that this IP does not point to localhost (127.0.0.1). -If you see such an IP or the extensions service complains that it cannot resolve the IP, you can manually set the IP address of the extensions service. You can do so by providing an SP_HOST environment variable. - - -To check if the service is up and running, open the browser on *'localhost:8090'* (or the port defined in the service definition). The machine-readable description of the processor should be visible as shown below. - -Project Structure - - -
-> **Common Problems**
->
-> If the service description is not shown on 'localhost:8090', you might have to change the port address.
-> This needs to be done in the configuration of your service, further explained in the configurations part of the developer guide.
->
-> If the service does not show up in the StreamPipes installation menu, click on 'MANAGE ENDPOINTS' and add 'http://YOUR_IP_OR_DNS_NAME:8090'.
-> Use the IP or DNS name you provided as the SP_HOST variable, or the IP (if resolvable) found by the auto-discovery service printed in the console.
-> After adding the endpoint, a new processor with the name *Example* should show up.
-
-Now you can go to StreamPipes.
-Your new processor *'Example'* should now show up in the installation menu ("Install Pipeline Elements" in the left navigation bar).
-Install it, then switch to the pipeline view and create a simple pipeline that makes use of your newly created processor.
-In case you opened the StreamPipes installation for the first time, it should have been installed automatically during the setup process.
-
-Project Structure
-
-Start this pipeline.
-Now you should see logging messages in your console and, once you've created a visualization, you can also see the resulting events of your component in StreamPipes.
-
-Congratulations, you have just created your first processor!
-From here on you can start experimenting and implement your own algorithms.
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-event-model.md b/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-event-model.md
deleted file mode 100644
index f4bb8ed1e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-event-model.md
+++ /dev/null
@@ -1,142 +0,0 @@
----
-id: extend-sdk-event-model
-title: "SDK Guide: Event Model"
-sidebar_label: "SDK: Event Model"
-original_id: extend-sdk-event-model
----
-
-## Introduction
-
-This guide explains the usage of the event model to manipulate runtime events for data processors and data sinks.
-
-## Prerequisites
-
-This guide assumes that you are already familiar with the basic setup of [data processors](extend-first-processor).
-
-### Property Selectors
-
-In most cases, fields that are subject to transformation by pipeline elements are provided by the assigned ``MappingProperty`` (see the guide on [static properties](extend-sdk-static-properties)).
-
-Mapping properties return a ``PropertySelector`` that identifies a field based on (i) the **streamIndex** and (ii) the runtime name of the field.
-Let's assume we have an event with the following structure:
-
-```json
-{
-  "timestamp" : 1234556,
-  "temperature" : 37.0,
-  "deviceId" : "sensor1",
-  "running" : true,
-  "location" : {"latitude" : 34.4, "longitude" : -47},
-  "lastValues" : [45, 22, 21]
-}
-```
-
-In addition, we assume that a data processor exists (with one input node) that converts the temperature value (measured in degrees Celsius) to a degrees Fahrenheit value.
-In this case, a mapping property (selected by the pipeline developer in the StreamPipes UI) would link to the ``temperature`` field of the event.
-
-The mapping property value will be the ``PropertySelector`` of the temperature value, which looks as follows:
-
-```
-s0::temperature
-```
-
-``s0`` identifies the stream (in this case, only one input stream exists, but as data processors might require more than one input stream, a stream identifier is required), while the suffix identifies the runtime name.
-
-Note: If you add a new field to an input event, you don't need to provide the selector; you can just assign the runtime name as defined by the [output strategy](extend-sdk-output-strategies).
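-
-To show how a selector travels from the pipeline element description to the runtime, here is a minimal, hypothetical sketch (the label key ``temperature-mapping`` is an assumption; the extractor and event APIs are the ones used throughout this guide):
-
-```java
-// Hypothetical sketch: the selector (e.g. "s0::temperature") is extracted once
-// at invocation time and then used in onEvent to read the referenced field.
-private String temperatureSelector;
-
-@Override
-public void onInvocation(ProcessorParams parameters, SpOutputCollector collector,
-                         EventProcessorRuntimeContext context) {
-  this.temperatureSelector = parameters.extractor().mappingPropertyValue("temperature-mapping");
-}
-
-@Override
-public void onEvent(Event event, SpOutputCollector collector) {
-  Float temperature = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsFloat();
-  collector.collect(event);
-}
-```
-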
-
-### Reading Fields
-
-You can get a field from an event by providing the corresponding selector:
-
-```java
-
-@Override
-  public void onEvent(Event event, SpOutputCollector out) {
-
-    PrimitiveField temperatureField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsPrimitive();
-  }
-
-```
-
-Similarly, if your mapping property links to a nested property, use
-
-```java
-
-@Override
-  public void onEvent(Event event, SpOutputCollector out) {
-
-    NestedField nestedField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsNested();
-  }
-
-```
-
-and for a list-based field:
-
-```java
-
-@Override
-  public void onEvent(Event event, SpOutputCollector out) {
-
-    ListField listField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsList();
-  }
-
-```
-
-### Parsing Fields
-
-#### Primitive Fields
-
-A ``PrimitiveField`` contains convenience methods to directly cast a field to the target datatype:
-
-```java
-
-// parse the value as a float datatype
-Float temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsFloat();
-
-// or do the same with a double datatype
-Double temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsDouble();
-
-// extracting a string
-String deviceId = event.getFieldBySelector(deviceIdSelector).getAsPrimitive().getAsString();
-
-// this also works for extracting fields from nested fields:
-Double latitude = event.getFieldBySelector(latitudeSelector).getAsPrimitive().getAsDouble();
-
-// extracting boolean values
-Boolean running = event.getFieldBySelector(runningSelector).getAsPrimitive().getAsBoolean();
-```
-
-In rare cases, you might want to receive a field directly based on the runtime name as follows:
-
-```java
-Double temperature = event.getFieldByRuntimeName("temperature").getAsPrimitive().getAsDouble();
-```
-
-#### List Fields
-
-Lists can also be retrieved by providing the corresponding selector and can automatically be parsed to a list of primitive datatypes:
-
-```java
-
-List<Integer> lastValues = event.getFieldBySelector(lastValueSelector).getAsList().parseAsSimpleType(Integer.class);
-
-```
-
-(coming soon: parsing complex lists)
-
-
-### Adding/Updating Fields
-
-Primitive fields can easily be added to an event by providing the runtime name and the object:
-
-```java
-
-  // add a primitive field with runtime name "city" and value "Karlsruhe"
-  event.addField("city", "Karlsruhe");
-
-  // remove the field "temperature" from the event
-  event.removeFieldBySelector(temperatureSelector);
-
-  // add a new field
-  event.addField("fahrenheit", 48);
-```
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-functions.md b/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-functions.md
deleted file mode 100644
index 77d2bb966..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-functions.md
+++ /dev/null
@@ -1,126 +0,0 @@
----
-id: extend-sdk-functions
-title: "SDK Guide: Functions"
-sidebar_label: "SDK: Functions"
-original_id: extend-sdk-functions
----
-
-## Introduction
-
-Pipeline elements such as data processors and data sinks are a great way
-to create _reusable_ components that can be part of pipelines.
-However, creating a pipeline element is not always the best choice:
-
-* The behaviour of a data processor is bound to a specific input stream _and_
-* A data processor doesn't contain any user-defined configuration _and_
-* The intended action is fixed or known at build time and the data processor shouldn't be available in the pipeline editor.
- -To cover such use cases, we provide _StreamPipes Functions_. Functions -are a great way to define custom processing logic based on previously -connected data streams. - -Functions can be registered in a similar way to pipeline elements, but define expected input -streams at startup time. Functions are started once the corresponding _extensions service_ starts -and run until the service is stopped. - -## Writing a function - -
-> **Work in Progress**
->
-> Functions are currently in preview mode and are not yet recommended for production usage.
-> APIs are subject to change in a future version.
-
-To define a function, create a new extensions service using the [Maven Archetypes](06_extend-archetypes.md) or use an already existing service.
-
-### Skeleton
-
-Functions can be defined by creating a new class which extends the ``StreamPipesFunction`` class.
-
-The basic skeleton looks like this:
-
-```java
-public class StreamPipesFunctionExample extends StreamPipesFunction {
-
-  @Override
-  public FunctionId getFunctionId() {
-    return FunctionId.from("my-function-id", 1);
-  }
-
-  @Override
-  public List<String> requiredStreamIds() {
-    return List.of("<stream-id>");
-  }
-
-  @Override
-  public void onServiceStarted(FunctionContext context) {
-    // called when the service is started
-  }
-
-  @Override
-  public void onEvent(Event event, String streamId) {
-    // called when an event arrives
-  }
-
-  @Override
-  public void onServiceStopped() {
-    // called when the service is stopped
-  }
-}
-
-```
-
-The structure of a function class is easy to understand:
-* _getFunctionId_ requires an identifier in the form of a ``FunctionId``, which defines the id itself along with a version number that can be freely chosen.
-* _requiredStreamIds_ expects a list of references to data streams that are already available in StreamPipes. See below to learn how to find the id of a stream in StreamPipes.
-* _onServiceStarted_ is called once the extensions service is started and can be used to initialize the function.
-* _onEvent_ is called every time a new event arrives and provides a ``streamId`` as a reference to the corresponding stream, which is useful in case multiple data streams are received by the function.
-* _onServiceStopped_ is called when the extensions service is stopped and can be used to perform any required cleanup.
-
-### Getting a stream ID
-
-Functions require a reference to all data streams that should be retrieved by the function.
-Currently, the only way to get the ID of a stream is by navigating to the ``Asset Management`` view in the StreamPipes UI.
-Create a new asset, click on ``Edit Asset`` and open ``Add Link`` in the _Linked Resources_ panel.
-Choose ``Data Source`` as link type, select one of the available sources, copy the ``Resource ID`` and provide this ID in the ``requiredStreamIds`` method.
-
-### Function Context
-
-The ``onServiceStarted`` method provides a function context which offers several convenience methods to work with functions:
-
-* _getFunctionId_ returns the current function identifier.
-* _getConfig_ returns a reference to configuration options of the extensions service.
-* _getClient_ returns a reference to the StreamPipes client to interact with features from the REST API.
-* _getStreams_ returns the data model of all data streams defined in the ``requiredStreamIds`` method.
-* _getSchema_ returns the schema of a specific data stream by providing the ``streamId``.
-
-
-## Registering a function
-
-Registering a function is easy and can be done in the _Init_ class of the service.
-E.g., considering a service definition as illustrated below, simply call ``registerFunction`` and
-provide an instance of your function.
-```java
-
-  @Override
-  public SpServiceDefinition provideServiceDefinition() {
-    return SpServiceDefinitionBuilder.create("my-service-id",
-            "StreamPipes Function Example",
-            "",
-            8090)
-        .registerFunction(new MyExampleFunction())
-        .registerMessagingFormats(
-            new JsonDataFormatFactory())
-        .registerMessagingProtocols(
-            new SpNatsProtocolFactory())
-        .build();
-  }
-
-```
-
-## Metrics & Monitoring
-
-Similar to pipeline elements, functions register at the StreamPipes core.
-Running functions can be seen in the pipeline view of the user interface under _Functions_, right below the list of available pipelines.
-Similar to pipelines, simple metrics, monitoring info and exceptions can be viewed in the _Details_ section of each function.
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-migration-sd.md b/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-migration-sd.md
deleted file mode 100644
index 054d11c60..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-migration-sd.md
+++ /dev/null
@@ -1,117 +0,0 @@
----
-id: extend-sdk-migration-service-discovery
-title: "Migration Guide: New Service Discovery in 0.69.0"
-sidebar_label: "Migration Guide: 0.69.0"
-original_id: extend-sdk-migration-service-discovery
----
-
-
-## Introduction
-As part of our roadmap towards a release 1.0, Apache StreamPipes 0.69.0 features a new service discovery approach along with performance improvements related to a new storage layer for pipeline element descriptions.
-
-The new service discovery approach is better suited for cloud-native scenarios, as the hostname of a pipeline element is now decoupled from its description. As such, StreamPipes now supports recovery of pipeline elements independent of their assigned host.
-In addition, the new approach simplifies development of StreamPipes, e.g., in cases where the core is running in Docker and pipeline elements are developed on a local machine. In this case, the IP of the host machine should now be auto-discovered, so that providing environment variables becomes obsolete in most cases.
-The second large improvement is related to the replacement of RDF4J as the storage engine with a NoSQL database. This leads to much faster load times (you will notice this especially at system startup).
-
-While we are heavily working towards a stable release 1.0, we decided to put our efforts into the remaining features required for 1.0 and do not provide an auto-migration related to some breaking changes.
-Therefore, we recommend reinstalling StreamPipes when updating to 0.69.0. We currently plan to have at most two more releases before releasing the first 1.x version of StreamPipes.
-
-## Installation
-* Before upgrading to 0.69.0, clean any existing installation (e.g., by running ``docker-compose down -v``) and make sure that no volumes of StreamPipes are left.
-* Upgrade to the latest installer version (can be found at [streampipes/installer](https://github.com/apache/streampipes/tree/dev/installer))
-* Upon restart, verify that the setup dialog appears (you should see the new StreamPipes logo) and re-initialize the system.
-
-## SDK changes
-
-0.69.0 comes with a new ``ServiceDefinitionBuilder``, which simplifies the definition of a pipeline element service.
-
-The ServiceDefinitionBuilder requires an ID for your extensions service, an optional title and description, and a default port.
It is best to provide 8090 as the default port, so that this will be the standard port of all StreamPipes extensions services at deployment time in a containerized environment.
-The port can always be overridden by providing an ``SP_PORT`` environment variable.
-
-### Init class
-
-Modify the Init class of your pipeline element service as follows:
-
-```java
-public class ExamplesInit extends StandaloneModelSubmitter {
-
-  public static void main(String[] args) {
-    new ExamplesInit().init();
-  }
-
-  @Override
-  public SpServiceDefinition provideServiceDefinition() {
-    return SpServiceDefinitionBuilder.create("org.apache.streampipes.processors.examples.jvm",
-            "StreamPipes Code Examples",
-            "",
-            8090)
-        .registerMessagingProtocols(new SpKafkaProtocolFactory(), new SpJmsProtocolFactory())
-        .registerMessagingFormats(new JsonDataFormatFactory())
-        .registerPipelineElement(new MyPipelineElementController())
-        .registerAdapter(new MyAdapter())
-        .build();
-  }
-}
-```
-
-You can now easily define a StreamPipes extensions service that supports both custom adapters and pipeline elements by using the following Maven dependency.
-This is optional, and no changes to your existing Maven dependencies (except the version, e.g., 0.69.0-SNAPSHOT) are required.
-
-```xml
-<dependency>
-    <groupId>org.apache.streampipes</groupId>
-    <artifactId>streampipes-container-extensions</artifactId>
-</dependency>
-```
-
-
-### Configs
-Prior to version 0.69.0, additional configs had to be provided in a separate ``Config`` class. This is now obsolete - configs can be directly provided within the builder class as follows:
-
-```java
-
-  @Override
-  public SpServiceDefinition provideServiceDefinition() {
-    return SpServiceDefinitionBuilder.create("org.apache.streampipes.processors.examples.jvm",
-            "StreamPipes Code Examples",
-            "",
-            8090)
-        .registerPipelineElement(new MyPipelineElement())
-        .registerAdapter(new MyAdapter())
-        .addConfig("key", 1)
-        .addConfig("my-string-config", "myvalue")
-        .build();
-  }
-```
-
-Configs can be easily accessed from the ``EventProcessorRuntimeContext`` (or ``EventSinkRuntimeContext``):
-
-```java
-@Override
-  public void onInvocation(Parameters params,
-                           SpOutputCollector spOutputCollector,
-                           EventProcessorRuntimeContext context) {
-
-    Integer myConfigValue = context.getConfigStore().getConfig().getInteger("key");
-  }
-```
-
-
-### Service Discovery
-An extensions service can be started by executing the Init class. StreamPipes will now automatically select the proper service IP address and register the service in Consul.
-You can inspect the selected IP address in the console:
-
-```
-16:41:58.342 SP [main] INFO o.a.s.commons.networking.Networking - Using auto-discovered IP: 172.30.80.1
-16:41:58.364 SP [main] INFO o.a.s.commons.networking.Networking - Using port from provided environment variable SP_PORT: 6025
-16:41:58.367 SP [main] INFO o.a.s.c.init.DeclarersSingleton - Registering 0 configs in key/value store
-16:41:58.400 SP [main] INFO o.a.s.s.consul.ConsulProvider - Checking if consul is available...
-16:41:58.419 SP [main] INFO o.a.s.s.consul.ConsulProvider - Successfully connected to Consul
-```
-
-In some (rare) cases, a non-resolvable IP will be selected. In this case, you can manually override the IP by providing an ``SP_HOST`` environment variable. This falls back to a similar behaviour as in pre-0.69.0 versions and will use the manually provided IP.
- - - - diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-output-strategies.md b/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-output-strategies.md deleted file mode 100644 index fb0412f8e..000000000 --- a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-output-strategies.md +++ /dev/null @@ -1,349 +0,0 @@ ---- -id: extend-sdk-output-strategies -title: "SDK Guide: Output Strategies" -sidebar_label: "SDK: Output Strategies" -original_id: extend-sdk-output-strategies ---- - -## Introduction -In StreamPipes, output strategies determine the output of a data processor. -As the exact input schema of a processor is usually not yet known at development time (as processors can be connected with any stream that matches their requirements), output strategies are a concept to define how an input data stream is transformed to an output data stream. - -The following reference describes how output strategies can be defined using the SDK. - -
-> **Code on Github**
->
-> For all examples, the code can be found on Github.
-
-## Reference
-
-The methods described below to create output strategies are available in the ``ProcessingElementBuilder`` class and are usually used in the ``declareModel`` method of the controller class.
-
-In the following, we will use this example event to explain how output strategies define the output of a data processor:
-
-```json
-{
-  "timestamp" : 1234556,
-  "temperature" : 37.0,
-  "deviceId" : "1"
-}
-```
-
-### Keep Output
-
-A ``KeepOutputStrategy`` declares that the output event schema will be equal to the input event schema.
-In other terms, the processor does not change the schema, but might change the values of event properties.
-
-A keep output strategy can be defined as follows:
-
-```java
-
-@Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
-        ".keep", "Keep output example", "")
-        .requiredStream(StreamRequirementsBuilder.
-            create()
-            .requiredProperty(EpRequirements.anyProperty())
-            .build())
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-
-        // declaring a keep output strategy
-        .outputStrategy(OutputStrategies.keep())
-
-        .build();
-  }
-
-```
-
-According to the example above, the expected output event schema of the example input event would be:
-
-```json
-{
-  "timestamp" : 1234556,
-  "temperature" : 37.0,
-  "deviceId" : "1"
-}
-```
-
-Data processors that perform filter operations (e.g., filtering temperature values that are above a given threshold) are a common example for using keep output strategies.
-
-
-### Fixed Output
-
-A ``FixedOutputStrategy`` declares that the data processor itself provides the event schema. The output schema does not depend on the input event.
-
-Fixed output strategies need to provide the event schema they produce at development time:
-
-```java
-
-  @Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
-        ".fixed", "Fixed output example", "")
-        .requiredStream(StreamRequirementsBuilder.
-            create()
-            .requiredProperty(EpRequirements.anyProperty())
-            .build())
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-
-        // the fixed output strategy provides the schema
-        .outputStrategy(OutputStrategies.fixed(EpProperties.timestampProperty("timestamp"),
-            EpProperties.doubleEp(Labels.from("avg", "Average value", ""), "avg", SO.Number)))
-
-        .build();
-  }
-
-```
-
-In this example, we declare that the output schema always consists of two fields (``timestamp`` and ``avg``).
-
-Therefore, an output event should look like:
-
-```json
-{
-  "timestamp" : 1234556,
-  "avg" : 36.0
-}
-```
-
-
-### Append Output
-
-An ``AppendOutputStrategy`` appends additional fields to the schema of an incoming event stream. For instance, data processors that perform enrichment operations usually make use of append output strategies.
-
-Similar to the fixed output strategy, the additional fields must be provided at development time in the controller method as follows:
-
-```java
-  @Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
-        ".append", "Append output example", "")
-
-        // boilerplate code not relevant here, see above
-
-        // declaring an append output
-        .outputStrategy(OutputStrategies.append(EpProperties.integerEp(Labels.from("avg",
-            "The average value", ""), "avg", SO.Number)))
-
-        .build();
-  }
-```
-
-In this case, the output event would have an additional field ``avg``:
-
-```json
-{
-  "timestamp" : 1234556,
-  "temperature" : 37.0,
-  "deviceId" : "1",
-  "avg" : 123.0
-}
-```
-
-### Custom Output
-
-In some cases, pipeline developers using the StreamPipes UI should be able to manually select fields from an input event schema. For such use cases, a ``CustomOutputStrategy`` can be used:
-
-```java
-
-@Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
-        ".custom", "Custom output example", "")
-
-        // boilerplate code not relevant here, see above
-
-        // declaring a custom output
-        .outputStrategy(OutputStrategies.custom())
-
-        .build();
-  }
-
-```
-
-If a data processor defines a custom output strategy, the pipeline editor will show a customization dialog that lets users select the fields to keep:
-
-Number Parameter
-
-Taking our example, and assuming that the user selects both the ``timestamp`` and the ``temperature`` field, the expected output event should look like this:
-
-```json
-{
-  "timestamp" : 1234556,
-  "temperature" : 37.0
-}
-```
-
-How do we know which fields were selected once the data processor is invoked? Use the proper method from the extractor in the ``onInvocation`` method:
-
-```java
-@Override
-  public ConfiguredEventProcessor<DummyParameters> onInvocation(DataProcessorInvocation graph, ProcessingElementParameterExtractor extractor) {
-
-    List<String> outputSelectors = extractor.outputKeySelectors();
-
-    return new ConfiguredEventProcessor<>(new DummyParameters(graph), DummyEngine::new);
-  }
-```
-
-### Transform Output
-
-A ``TransformOutputStrategy`` declares that one or more fields of an incoming event stream are transformed. Transformations can be applied to the datatype of the property, the runtime name of the property, or any other schema-related declaration such as measurement units.
-
-#### Static Transform Operations
-
-Static transform operations do not depend on any user input (at pipeline development time) in order to know how to transform a field of an incoming event schema.
-
-Let's say our data processor transforms string fields (that actually contain numbers) to a number datatype. In this case, we can use a static transform output strategy:
-
-```java
-
-  @Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
-        ".transform", "Transform output example", "")
-        .requiredStream(StreamRequirementsBuilder.
-            create()
-            .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
-                ("str", "The date property as a string", ""), PropertyScope.NONE)
-            .build())
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-
-        // static transform operation
-        .outputStrategy(OutputStrategies.transform(TransformOperations
-            .staticDatatypeTransformation("str", Datatypes.Long)))
-
-        .build();
-  }
-
-```
-
-Note the mapping property that we use to determine which field of the input event should be transformed.
-
-The expected output event would look like this:
-
-```json
-{
-  "timestamp" : 1234556,
-  "temperature" : 37.0,
-  "deviceId" : 1
-}
-```
-
-#### Dynamic Transform Operations
-
-Sometimes, the exact transform output depends on user input. Let's take a field renaming processor as an example, which lets the user rename a field from an input event schema to another field name.
-For such use cases, we can use a ``DynamicTransformOperation``:
-
-```java
-
-  @Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
-        ".transform", "Transform output example", "")
-        .requiredStream(StreamRequirementsBuilder.
-            create()
-            .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
-                ("str", "The date property as a string", ""), PropertyScope.NONE)
-            .build())
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-
-        // the text input to enter the new runtime name
-        .requiredTextParameter(Labels.from("new-runtime-name", "New Runtime Name", ""))
-
-        // dynamic transform operation
-        .outputStrategy(OutputStrategies.transform(TransformOperations
-            .dynamicRuntimeNameTransformation("str", "new-runtime-name")))
-
-        .build();
-  }
-
-```
-
-For dynamic transform operations, an additional identifier that links to another static property can be assigned and later be fetched in the ``onInvocation`` method.
-
-Assuming we want to rename the field ``temperature`` to ``temp``, the resulting output event should look like this:
-
-```json
-{
-  "timestamp" : 1234556,
-  "temp" : 37.0,
-  "deviceId" : 1
-}
-```
-
-### Custom Transform Output
-
-Finally, in some cases the output schema cannot be described at pipeline development time. For these (usually rare) cases, a ``CustomTransformOutput`` strategy can be used.
-
-In this case, a callback function will be invoked in the controller class just after a user has filled in any static properties and clicks on ``Save`` in the pipeline editor.
-
-To define a custom transform output, we need to implement an interface in the controller class:
-
-```java
-public class CustomTransformOutputController extends
-    StandaloneEventProcessingDeclarer<DummyParameters> implements
-    ResolvesContainerProvidedOutputStrategy<DataProcessorInvocation, ProcessingElementParameterExtractor> {
-
-
-  @Override
-  public EventSchema resolveOutputStrategy(DataProcessorInvocation processingElement, ProcessingElementParameterExtractor parameterExtractor) throws SpRuntimeException {
-    // return the output schema based on the provided configuration here
-  }
-}
-```
-
-In addition, the output strategy must be declared in the ``declareModel`` method:
-
-```java
-
-@Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
-        ".customtransform", "Custom transform output example", "")
-        .requiredStream(StreamRequirementsBuilder.
-            create()
-            .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
-                ("str", "The date property as a string", ""), PropertyScope.NONE)
-            .build())
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-
-        // declare a custom transform output
-        .outputStrategy(OutputStrategies.customTransformation())
-
-        .build();
-  }
-
-```
-
-Once a new pipeline using this data processor is created and the configuration is saved, the ``resolveOutputStrategy`` method will be called, so that an event schema can be provided based on the given configuration. An extractor instance (see the guide on static properties) is available to extract the selected static properties and the connected event stream.
-
-```java
-@Override
-  public EventSchema resolveOutputStrategy(DataProcessorInvocation processingElement, ProcessingElementParameterExtractor parameterExtractor) throws SpRuntimeException {
-    return new EventSchema(Arrays
-        .asList(EpProperties
-            .stringEp(Labels.from("runtime", "I was added at runtime", ""), "runtime", SO.Text)));
-  }
-```
-
-In this example, the output event schema should look like this:
-
-```json
-{
-  "runtime" : "Hello world!"
-}
-```
-
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-static-properties.md b/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-static-properties.md
deleted file mode 100644
index 51fec4c3e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-static-properties.md
+++ /dev/null
@@ -1,267 +0,0 @@
----
-id: extend-sdk-static-properties
-title: "SDK Guide: Static Properties"
-sidebar_label: "SDK: Static Properties"
-original_id: extend-sdk-static-properties
----
-
-## Introduction
-Static properties represent user-facing parameters that are provided by pipeline developers.
-Processing elements can specify required static properties, which will render different UI views in the pipeline editor.
-
-The following reference describes how static properties can be defined using the SDK.
-
-> **Code on Github**
->
-> For all examples, the code can be found on Github.
-
-## Reference
-
-The methods described below to create static properties are available in the ``ProcessingElementBuilder`` and ``DataSinkBuilder`` classes and are usually used in the ``declareModel`` method of the controller class.
-
-### Mapping property
-
-In StreamPipes, processing elements usually operate on fields of an event stream. For instance, a filter processor operates on a specific field from an input stream (e.g., a field measuring the temperature).
-Typically, pipeline developers should select the exact field the operation is applied to by themselves.
-As this field is not yet known at pipeline element development time (as it is defined by the pipeline developer in the pipeline editor), mapping properties serve to map a stream requirement to a specific field from the actual input event stream.
-
-### Unary mapping property
-
-A unary mapping property maps a stream requirement to an actual field of an event stream. Therefore, the ``StreamRequirementsBuilder`` provides the option to directly add a mapping property along with a property requirement:
-
-```java
-.requiredStream(StreamRequirementsBuilder.
-    create()
-    .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
-        Labels.from("mp-key", "My Mapping", ""),
-        PropertyScope.NONE)
-    .build())
-```
-
-This leads to a selection dialog in the pipeline element customization which provides the user with a selection of all event properties (fields) from the input stream that match the specified property requirement:
-
-Text
-
-At invocation time, the value can be extracted in the ``onInvocation`` method as follows:
-
-```java
-// Extract the mapping property value
-String mappingPropertySelector = extractor.mappingPropertyValue("mp-key");
-```
-
-Note that this method returns a ``PropertySelector``, which can be used by the event model to extract the actual value of this field.
-
-### N-ary mapping property
-
-N-ary mapping properties work similar to unary mapping properties, but allow the mapping of one requirement to multiple event properties matching the requirement:
-
-```java
-.requiredStream(StreamRequirementsBuilder.
-    create()
-    .requiredPropertyWithNaryMapping(EpRequirements.numberReq(),
-        Labels.from("mp-key", "My Mapping", ""),
-        PropertyScope.NONE)
-    .build())
-```
-
-This renders the following selection, where users can select more than one matching event property:
-
-Text
-
-The following snippet returns a list containing the property selectors of all event properties that have been selected:
-
-```java
-// Extract the mapping property values
-List<String> mappingPropertySelectors = extractor.mappingPropertyValues("mp-key");
-```
-
-### Free-Text Parameters
-
-A free-text parameter requires the pipeline developer to enter a single value - which can be a string or another primitive data type.
-The input of free-text parameters can be restricted to specific value ranges or can be linked to the value set of a connected input data stream.
-
-#### Text Parameters
-
-A text parameter lets the user enter a string value. The following code line in the controller class
-
-```java
-.requiredTextParameter(Labels.from(SP_KEY, "Example Name", "Example Description"))
-```
-
-leads to the following input dialog in the pipeline editor:
-
-Text
-
-Users can enter any value that will be converted to a string datatype.
To receive the entered value in the ``onInvocation`` method, use the following method from the ``ParameterExtractor``:
-
-```java
-String textParameter = extractor.singleValueParameter(SP_KEY, String.class);
-```
-
-#### Number parameters
-
-A number parameter lets the user enter a number value, either a floating-point number or an integer:
-
-```java
-// create an integer parameter
-.requiredIntegerParameter(Labels.from(SP_KEY, "Integer Parameter", "Example Description"))
-
-// create a float parameter
-.requiredFloatParameter(Labels.from("float-key", "Float Parameter", "Example Description"))
-
-```
-
-leads to the following input dialog in the pipeline editor, accepting only integer values:
-
-Number Parameter
-
-The pipeline editor performs type validation and ensures that only numbers can be added by the user. To receive the entered value in the ``onInvocation`` method, use the following method from the ``ParameterExtractor``:
-
-```java
-// Extract the integer parameter value
-Integer integerParameter = extractor.singleValueParameter(SP_KEY, Integer.class);
-
-// Extract the float parameter value
-Float floatParameter = extractor.singleValueParameter("float-key", Float.class);
-
-```
-
-#### Numbers with value specification
-
-You can also specify the value range of a number-based free text parameter:
-
-```java
-// create an integer parameter with value range
-.requiredIntegerParameter(Labels.from(SP_KEY, "Integer Parameter", "Example Description"), 0, 100, 1)
-
-```
-
-which renders the following input field:
-
-Number Parameter
-
-Receive the entered value in the same way as a standard number parameter.
-
-#### Free-text parameters linked to an event property
-
-
-### Single-Value Selections
-
-Single-value selections let the user select from a pre-defined list of options.
-A single-value selection requires the user to select exactly one option.
-
-```java
-.requiredSingleValueSelection(Labels.from("id", "Example Name", "Example Description"),
-    Options.from("Option A", "Option B", "Option C"))
-
-```
-
-Single-value selections will be rendered as a set of radio buttons in the pipeline editor:
-
-Number Parameter
-
-To extract the selected value, use the following method from the parameter extractor:
-
-```java
-// Extract the selected value
-String selectedSingleValue = extractor.selectedSingleValue("id", String.class);
-```
-
-> **Declaring options**
->
-> Sometimes, you may want to use an internal name that differs from the display name of an option.
-> For that, you can use the method ``Options.from(Tuple2<String, String>)`` and the extractor method ``selectedSingleValueInternalName``.
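-
-As a minimal, hypothetical sketch (the label key ``unit`` and the option values are made-up examples; only ``Options.from(Tuple2<String, String>)`` and ``selectedSingleValueInternalName`` are taken from the note above), this could look like:
-
-```java
-// Declare options whose internal names (first tuple element) differ from
-// their display names (second tuple element):
-.requiredSingleValueSelection(Labels.from("unit", "Unit", "The unit of the value"),
-    Options.from(new Tuple2<>("DEG_C", "Degrees Celsius"),
-                 new Tuple2<>("DEG_F", "Degrees Fahrenheit")))
-
-// Later, extract the internal name of the selected option (e.g. "DEG_C"):
-String unit = extractor.selectedSingleValueInternalName("unit", String.class);
-```
-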
-
-
-
-### Multi-Value Selections
-
-Multi-value selections let the user select from a pre-defined list of options, where multiple or no option might be selected.
-
-```java
-.requiredMultiValueSelection(Labels.from("id", "Example Name", "Example Description"),
-    Options.from("Option A", "Option B", "Option C"))
-
-```
-
-Multi-value selections will be rendered as a set of checkboxes in the pipeline editor:
-
-Number Parameter
-
-To extract the selected values, use the following method from the parameter extractor:
-
-```java
-// Extract the selected values
-List<String> selectedMultiValue = extractor.selectedMultiValues("id", String.class);
-```
-
-### Domain Concepts
-
-(coming soon...)
-
-### Collections
-
-You can also define collections based on other static properties.
-
-```java
-// create a collection parameter
-.requiredParameterAsCollection(Labels.from("collection", "Example Name", "Example " +
-    "Description"), StaticProperties.stringFreeTextProperty(Labels
-    .from("text-property", "Text", "")))
-```
-
-While the items of the collection can be provided in the same way as the underlying static property, the UI provides buttons to add items to and remove items from the collection.
-
-Number Parameter
-
-To extract the selected values from the collection, use the following method from the parameter extractor:
-
-```java
-// Extract the text parameter values
-List<String> textParameters = extractor.singleValueParameterFromCollection("collection", String.class);
-```
-
-### Runtime-resolvable selections
-
-In some cases, the options of selection parameters are not static, but depend on other values or might change at runtime. In this case, you can use runtime-resolvable selections.
-
-First, let your controller class implement ``ResolvesContainerProvidedOptions``:
-
-```java
-public class RuntimeResolvableSingleValue extends
-    StandaloneEventProcessingDeclarer implements ResolvesContainerProvidedOptions { ... }
-```
-
-Next, define the parameter in the ``declareModel`` method:
-
-```java
-// create a single value selection parameter that is resolved at runtime
-.requiredSingleValueSelectionFromContainer(Labels.from("id", "Example Name", "Example " +
-    "Description"))
-```
-
-Finally, implement the method ``resolveOptions``, which will be called at runtime once the processor is used:
-
-```java
-  @Override
-  public List<Option> resolveOptions(String requestId, EventProperty linkedEventProperty) {
-    return Arrays.asList(new RuntimeOptions("I was defined at runtime", ""));
-  }
-```
-
-The UI will render a single-value parameter based on the options provided at runtime:
-
-Number Parameter
-
-The parameter extraction does not differ from the extraction of static single-value parameters.
-
-> **Multi-value selections**
->
-> Although this example shows the usage of runtime-resolvable selections using single-value selections, the same also works for multi-value selections!
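-
-For completeness, a hedged sketch of the multi-value variant: the builder method ``requiredMultiValueSelectionFromContainer`` is an assumption (analogous to the single-value variant above), while ``selectedMultiValues`` is the extractor method shown earlier:
-
-```java
-// Assumed multi-value counterpart to requiredSingleValueSelectionFromContainer:
-.requiredMultiValueSelectionFromContainer(Labels.from("id", "Example Name", "Example Description"))
-
-// Extraction works as for static multi-value selections:
-List<String> selected = extractor.selectedMultiValues("id", String.class);
-```
-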
-
-
-
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-stream-requirements.md b/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-stream-requirements.md
deleted file mode 100644
index 98a9215a7..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-sdk-stream-requirements.md
+++ /dev/null
@@ -1,179 +0,0 @@
----
-id: extend-sdk-stream-requirements
-title: "SDK Guide: Stream Requirements"
-sidebar_label: "SDK: Stream Requirements"
-original_id: extend-sdk-stream-requirements
----
-
-## Introduction
-
-Data processors and data sinks can define ``StreamRequirements``. Stream requirements allow pipeline elements to express requirements on an incoming event stream that are needed for the element to work properly.
-Once users create pipelines in the StreamPipes Pipeline Editor, these requirements are verified against the connected event stream.
-By using this feature, StreamPipes ensures that only syntactically and semantically valid pipeline elements can be connected.
-
-This guide covers the creation of stream requirements. Before reading this section, we recommend that you make yourself familiar with the SDK guide on [data processors](dev-guide-processor-sdk.md) and [data sinks](dev-guide-sink-sdk.md).
-
-> **Code on Github**
->
-> For all examples, the code can be found on Github.
-
-## The StreamRequirementsBuilder
-
-Stream requirements can be defined in the ``declareModel`` method of the pipeline element class. Start with a method body like this:
-
-```java
-
-@Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
-        .requiredStream(StreamRequirementsBuilder.
-            create()
-
-            .build())
-
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-        .outputStrategy(OutputStrategies.keep())
-
-        .build();
-  }
-```
-
-The ``StreamRequirementsBuilder`` class provides methods to add stream requirements to a pipeline element.
-
-## Requirements on primitive fields
-
-As a very first example, let's assume we would like to create a data processor that filters numerical values that are above a given threshold.
-Consequently, any data stream that is connected to the filter processor needs to provide a numerical value.
-
-The stream requirement would be assigned as follows:
-
-```java
-@Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
-        .requiredStream(StreamRequirementsBuilder
-            .create()
-            .requiredProperty(EpRequirements.numberReq())
-            .build())
-
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-        .outputStrategy(OutputStrategies.keep())
-
-        .build();
-  }
-```
-
-Note the line starting with ``requiredProperty``, which requires any stream to provide a field of datatype ``number``.
-
-In many cases, you'll want to let the user select a specific field from all available fields of a data stream that match the specified requirement. For that, you simply use the method ``requiredPropertyWithUnaryMapping`` as follows:
-
-```java
-@Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
-        .requiredStream(StreamRequirementsBuilder
-            .create()
-            .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
-                Labels.from("number-mapping", "The value that should be filtered", ""), PropertyScope.NONE)
-            .build())
-
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-        .outputStrategy(OutputStrategies.keep())
-
-        .build();
-  }
-```
-
-See also the developer guide on [static properties](extend-sdk-static-properties) to better understand the usage of ``MappingProperties``.
-
-Requirements on primitive fields can be specified for all common datatypes:
-
-```java
-  @Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
-        ".simple", "Simple requirements specification examples", "")
-        .requiredStream(StreamRequirementsBuilder.
-            create()
-            .requiredProperty(EpRequirements.numberReq())    // any number
-            .requiredProperty(EpRequirements.doubleReq())    // any field of type double
-            .requiredProperty(EpRequirements.booleanReq())   // any field of type boolean
-            .requiredProperty(EpRequirements.integerReq())   // any field of type integer
-            .requiredProperty(EpRequirements.stringReq())    // any field of type string
-
-            .requiredProperty(EpRequirements.anyProperty())  // any field allowed (no restriction)
-            .requiredProperty(EpRequirements.timestampReq()) // any timestamp field
-            .build())
-
-
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-        .outputStrategy(OutputStrategies.keep())
-
-        .build();
-  }
-```
-
-### Specifying semantics
-
-For some algorithms, only specifying the datatype is not sufficient. Let's consider a geofencing algorithm that detects the presence of some geospatial coordinate (e.g., from a vehicle) within a given location.
-
-You could specify something like this:
-
-```java
-  StreamRequirementsBuilder
-      .create()
-      .requiredPropertyWithUnaryMapping(EpRequirements.doubleReq(), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
-      .requiredPropertyWithUnaryMapping(EpRequirements.doubleReq(), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
-      .build()
-```
-
-However, this would allow users to create strange pipelines, as users could connect any stream containing a double value to our geofencing algorithm.
-To avoid such situations, you can also specify requirements based on the semantics of a field:
-
-```java
-  StreamRequirementsBuilder
-      .create()
-      .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Latitude), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
-      .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Longitude), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
-      .build()
-```
-
-Note that in this case, we make use of Schema.org's ``Latitude`` concept ([https://schema.org/latitude](https://schema.org/latitude)). StreamPipes already includes popular vocabularies for specifying semantics. You are also free to use your own vocabularies.
-
-
-## Requirements on lists
-
-Similarly to primitive requirements, you can define processors that require data streams with list fields, see the following examples:
-
-```java
-@Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
-        ".list", "List requirements specification examples", "")
-        .requiredStream(StreamRequirementsBuilder.
-            create()
-            .requiredProperty(EpRequirements.listRequirement(Datatypes.Integer))
-            .requiredProperty(EpRequirements.listRequirement(Datatypes.Double))
-            .requiredProperty(EpRequirements.listRequirement(Datatypes.Boolean))
-            .requiredProperty(EpRequirements.listRequirement(Datatypes.String))
-            .build())
-
-
-        .supportedProtocols(SupportedProtocols.kafka())
-        .supportedFormats(SupportedFormats.jsonFormat())
-        .outputStrategy(OutputStrategies.keep())
-
-        .build();
-  }
-```
-
-## Requirements on nested properties
-
-(coming soon, see the Javadoc for now)
-
-
-
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-setup.md b/website-v2/versioned_docs/version-0.70.0/06_extend-setup.md
deleted file mode 100644
index bea5d2c5f..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-setup.md
+++ /dev/null
@@ -1,51 +0,0 @@
----
-id: extend-setup
-title: Development Setup
-sidebar_label: Development Setup
-original_id: extend-setup
----
-
-Pipeline elements in StreamPipes are provided as standalone microservices. New pipeline elements can be easily developed using the provided Maven archetypes and can be installed in StreamPipes at runtime.
-
-In this section, we describe our recommended minimum setup for locally setting up a development instance of StreamPipes needed to develop, run and test new pipeline elements.
-
-## IDE & required dev tools
-StreamPipes does not have specific requirements on the IDE - so feel free to choose the IDE of your choice.
-The only requirements in terms of development tools are that you have Java 8 and Maven installed.
-
-## StreamPipes CLI: Docker-based local StreamPipes instance
-In order to quickly test developed pipeline elements without needing to install all services required by StreamPipes, we provide a CLI tool that allows you to selectively start StreamPipes components.
-The CLI tool allows you to switch between several templates (based on docker-compose) depending on your role.
-
-The documentation on the usage of the CLI tool is available [here](06_extend-cli.md).
-
-## Override the SP_HOST variable
-
-By default, the backend/core of StreamPipes registers itself within StreamPipes' service discovery mechanism using an auto-discovered hostname.
-Usually, this will be an IP address from the Docker network, which is not resolvable from outside. Therefore, for local development you need to override the hostname with an IP address which is accessible from your local host where you develop extensions.
-When using the CLI, open the CLI folder ``installer/cli``, navigate to ``deploy/standalone/backend``, open the ``docker-compose.dev.yml`` file and add the SP_HOST environment variable, e.g.
-
-```
-version: "3.4"
-services:
-  backend:
-    ports:
-      - "8030:8030"
-    environment:
-      - SP_HOST=host.docker.internal
-```
-
-Note that host.docker.internal will work as an alias under Docker for Desktop on Windows and Mac, but not on Linux or M1. In this case, provide a resolvable hostname or IP address manually.
-
-## Starter projects
-
-Now, once you've started the development instance, you are ready to develop your very first pipeline element.
-Instead of starting from scratch, we recommend using our provided Maven archetypes:
-
-### Maven archetypes
-
-Create the Maven archetype as described in the [Maven Archetypes](06_extend-archetypes.md) guide.
-
-### Examples
-
-We provide several examples that explain the usage of some concepts in this [Github repo](https://github.com/apache/incubator-streampipes-examples).
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-processors.md b/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-processors.md
deleted file mode 100644
index ce938a3f9..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-processors.md
+++ /dev/null
@@ -1,363 +0,0 @@
----
-id: extend-tutorial-data-processors
-title: "Tutorial: Data Processors"
-sidebar_label: "Tutorial: Data Processors"
-original_id: extend-tutorial-data-processors
----
-
-In this tutorial, we will add a new data processor.
-
-From an architectural point of view, we will create a self-contained service that includes the description of the data processor and an implementation.
-
-## Objective
-
-We are going to create a new data processor that realizes a simple geofencing algorithm - we detect vehicles that enter a specified radius around a user-defined location.
-This pipeline element will be a generic element that works with any event stream that provides geospatial coordinates in the form of a latitude/longitude pair.
-
-The algorithm outputs every location event once the position has entered the geofence.
-
-
-> **Note**
->
-> The implementation in this tutorial is pretty simple - our processor will fire an event every time the GPS location is inside the geofence.
-> In a real-world application, you would probably want to define a pattern that recognizes the _first_ time a vehicle enters the geofence.
-> This can be easily done using a CEP library; a simple stateful variant is sketched below.
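-
-For illustration only, here is a minimal stateful sketch of such an entry detection (this is not part of the tutorial code; the field selectors and the ``isInside`` helper are assumptions):
-
-```java
-// Hypothetical sketch: remember whether the previous position was inside the
-// geofence and only forward events on an outside -> inside transition.
-private boolean wasInside = false;
-
-@Override
-public void onEvent(Event event, SpOutputCollector collector) {
-  float lat = event.getFieldBySelector(latitudeFieldName).getAsPrimitive().getAsFloat();
-  float lng = event.getFieldBySelector(longitudeFieldName).getAsPrimitive().getAsFloat();
-
-  boolean inside = isInside(lat, lng); // assumed helper: distance to center <= radius
-  if (inside && !wasInside) {
-    collector.collect(event);          // first event after entering the geofence
-  }
-  this.wasInside = inside;
-}
-```
-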
-
-
-## Project setup
-
-Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project skeleton (streampipes-archetype-extensions-jvm).
-Enter the following command in a command line of your choice (Apache Maven needs to be installed):
-
-```
-mvn archetype:generate \
--DarchetypeGroupId=org.apache.streampipes -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
--DarchetypeVersion=0.70.0 -DgroupId=my.groupId \
--DartifactId=my-example -DclassNamePrefix=MyExample -DpackageName=mypackagename
-```
-
-You will see a project structure similar to the structure shown in the [archetypes](06_extend-archetypes.md) section.
-
-> **Tip**
->
-> Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.
-
-Now you're ready to create your first data processor for StreamPipes!
-
-## Adding data processor requirements
-
-First, we will add a new stream requirement.
-Create a new class `GeofencingProcessor` which should look as follows:
-
-```java
-package org.apache.streampipes.pe.example;
-
-import org.apache.streampipes.commons.exceptions.SpRuntimeException;
-import org.apache.streampipes.model.DataProcessorType;
-import org.apache.streampipes.model.graph.DataProcessorDescription;
-import org.apache.streampipes.model.runtime.Event;
-import org.apache.streampipes.sdk.builder.ProcessingElementBuilder;
-import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
-import org.apache.streampipes.sdk.helpers.EpRequirements;
-import org.apache.streampipes.sdk.helpers.Labels;
-import org.apache.streampipes.sdk.helpers.Locales;
-import org.apache.streampipes.sdk.helpers.OutputStrategies;
-import org.apache.streampipes.sdk.utils.Assets;
-import org.apache.streampipes.wrapper.context.EventProcessorRuntimeContext;
-import org.apache.streampipes.wrapper.routing.SpOutputCollector;
-import org.apache.streampipes.wrapper.standalone.ProcessorParams;
-import org.apache.streampipes.wrapper.standalone.StreamPipesDataProcessor;
-
-public class GeofencingProcessor extends StreamPipesDataProcessor {
-
-  private static final String LATITUDE_CENTER = "latitude-center";
-  private static final String LONGITUDE_CENTER = "longitude-center";
-
-  @Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.apache.streampipes.tutorial-geofencing")
-        .category(DataProcessorType.ENRICH)
-        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
-        .build();
-  }
-
-  @Override
-  public void onInvocation(ProcessorParams parameters, SpOutputCollector spOutputCollector, EventProcessorRuntimeContext runtimeContext) throws SpRuntimeException {
-
-  }
-
-  @Override
-  public void onEvent(Event event, SpOutputCollector collector) throws SpRuntimeException {
-
-  }
-
-  @Override
-  public void onDetach() throws SpRuntimeException {
-
-  }
-}
-
-```
-
-In this class, we need to implement three methods: The `declareModel` method is used to define abstract stream requirements such as event properties that must be present in any input stream that is later connected to the element using the StreamPipes UI.
-The second method, `onInvocation`, is triggered once a pipeline is started. Finally, the `onEvent` method is called for each incoming event while the pipeline is running.
-
-Similar to data sources, the SDK provides a builder class to generate the description for data processors.
-Delete the content within the ``declareModel`` method and add the following lines:
-
-```java
-return ProcessingElementBuilder.create("org.apache.streampipes.tutorial.geofencing", "Geofencing", "A simple geofencing data processor")
-```
-
-This creates a new data processor with the ID, title and description assigned to the element builder.
-Next, we add some _stream requirements_ to the description.
As we'd like to develop a generic pipeline element that works with any event that provides a lat/lng pair, we define two stream requirements as stated below:
-
-```java
-.requiredStream(StreamRequirementsBuilder
-    .create()
-    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
-        Labels.from("latitude-field", "Latitude", "The event " +
-            "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
-    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
-        Labels.from("longitude-field", "Longitude", "The event " +
-            "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
-    .build())
-```
-
-The first line, `.requiredStream()`, defines that we want a data processor with exactly one input stream. Adding more stream requirements would create elements with multiple input connectors in StreamPipes.
-Stream requirements can be assigned by using the `StreamRequirementsBuilder` class.
-In our example, we define two requirements, so-called _domain property requirements_. In contrast to _data type requirements_, where we'd expect an event property with a field of a specific data type (e.g., float), domain property requirements expect a specific domain property, e.g., from a vocabulary such as the WGS84 Geo vocab.
-
-Once a pipeline is deployed, we are interested in the actual field (and its field name) that contains the latitude and longitude values.
-In some cases, there might be more than one field that satisfies a property requirement, and we would like users to select the property the geofencing component should operate on.
-Therefore, our example uses the method `requiredPropertyWithUnaryMapping`, which will map a requirement to a real event property of an input stream and let the user choose the appropriate field in the StreamPipes UI when pipelines are defined.
-
-Finally, the `PropertyScope` indicates that the required property is a measurement value (in contrast to a dimension value). This allows us later to provide improved user guidance in the pipeline editor.
-
-Besides requirements, users should be able to define the center coordinate of the geofence and the size of the fence, defined as a radius around the center in meters.
-The radius can be defined by adding a required integer parameter to the description:
-
-```java
-.requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.", 0, 1000, 1)
-```
-
-Similar to mapping properties, such parameters have an internal id (radius), a label and a description.
-In addition, we can assign a _value specification_ to the parameter indicating the value range we support.
-Our example supports a radius value between 0 and 1000 with a granularity of 1.
-In the StreamPipes UI, a required number parameter is rendered as a text input field; in case we provide an optional value specification, a slider input is automatically generated.
-
-Such user-defined parameters are called _static properties_. There are many different types of static properties (see the [Processor SDK](06_extend-sdk-static-properties.md) for an overview).
-
-In this example, we'll further add two very simple input fields to let users provide the latitude and longitude of the geofence center.
-
-Add the following lines to the `declareModel` method:
-
-```java
-.requiredFloatParameter(Labels.from(LATITUDE_CENTER, "Latitude", "The latitude value"))
-.requiredFloatParameter(Labels.from(LONGITUDE_CENTER, "Longitude", "The longitude value"))
-```
-
-Now we need to define the output of our geofencing pipeline element.
-As explained in the first section, the element should fire every time some geo-located entity arrives within the defined geofence.
-Therefore, the processor outputs the same schema as it receives as an input.
-Although we don't know the exact input right now, as it depends on the stream users connect in StreamPipes when creating pipelines, we can define an _output strategy_ as follows:
-
-```java
-.outputStrategy(OutputStrategies.keep())
-```
-
-This defines a _KeepOutputStrategy_, i.e., the input event schema is not modified by the processor.
-There are many more output strategies you can define depending on the functionality you desire, e.g., _AppendOutput_ for defining a processor that enriches events or _CustomOutput_ in case you would like users to select the output by themselves.
-
-That's it! We've now defined input requirements, required user input and an output strategy.
-In the next section, you will learn how to extract these parameters once the pipeline element is invoked after a pipeline was created.
-
-## Pipeline element invocation
-
-Once users start a pipeline that uses our geofencing component, the _onInvocation_ method in our class is called. The class `ProcessorParams` includes convenient access to user-configured parameters a user has selected in the pipeline editor and information on the actual streams that are connected to the pipeline element.
-
-Next, we are interested in the fields of the input event stream that contain the latitude and longitude values we would like to compute against the geofence center location:
-
-```java
-String latitudeFieldName = parameters.extractor().mappingPropertyValue("latitude-field");
-String longitudeFieldName = parameters.extractor().mappingPropertyValue("longitude-field");
-```
-
-We use the same `internalId` we've used to define the mapping property requirements in the `declareModel` method.
-
-Next, for extracting the geofence center coordinates, add two class variables `centerLatitude` and `centerLongitude` and assign the selected values using the following statements:
-
-```java
-this.centerLatitude = parameters.extractor().singleValueParameter(LATITUDE_CENTER, Float.class);
-this.centerLongitude = parameters.extractor().singleValueParameter(LONGITUDE_CENTER, Float.class);
-```
-
-The radius value can be extracted as follows:
-
-```java
-int radius = parameters.extractor().singleValueParameter("radius", Integer.class);
-```
-
-Great! That's all we need to describe a data processor for usage in StreamPipes.
-Your processor class should look as follows:
-
-```java
-package org.apache.streampipes.pe.example;
-
-import org.apache.streampipes.commons.exceptions.SpRuntimeException;
-import org.apache.streampipes.model.DataProcessorType;
-import org.apache.streampipes.model.graph.DataProcessorDescription;
-import org.apache.streampipes.model.runtime.Event;
-import org.apache.streampipes.model.schema.PropertyScope;
-import org.apache.streampipes.sdk.builder.ProcessingElementBuilder;
-import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
-import org.apache.streampipes.sdk.helpers.EpRequirements;
-import org.apache.streampipes.sdk.helpers.Labels;
-import org.apache.streampipes.sdk.helpers.Locales;
-import org.apache.streampipes.sdk.helpers.OutputStrategies;
-import org.apache.streampipes.sdk.utils.Assets;
-import org.apache.streampipes.vocabulary.Geo;
-import org.apache.streampipes.wrapper.context.EventProcessorRuntimeContext;
-import org.apache.streampipes.wrapper.routing.SpOutputCollector;
-import org.apache.streampipes.wrapper.standalone.ProcessorParams;
-import org.apache.streampipes.wrapper.standalone.StreamPipesDataProcessor;
-
-public class GeofencingProcessor extends StreamPipesDataProcessor {
-
-  private static final String LATITUDE_CENTER = "latitude-center";
-  private static final String LONGITUDE_CENTER = "longitude-center";
-
-  private float centerLatitude;
-  private float centerLongitude;
-  private String latitudeFieldName;
-  private String longitudeFieldName;
-
-  private int radius;
-
-  @Override
-  public DataProcessorDescription declareModel() {
-    return ProcessingElementBuilder.create("org.apache.streampipes.tutorial-geofencing")
-        .category(DataProcessorType.ENRICH)
-        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
-        .withLocales(Locales.EN)
-        .requiredStream(StreamRequirementsBuilder
-            .create()
-            .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
-                Labels.from("latitude-field", "Latitude", "The event " +
-                    "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
-            .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
-                Labels.from("longitude-field", "Longitude", "The event " +
-                    "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
-            .build())
-        .outputStrategy(OutputStrategies.keep())
-        .requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.", 0, 1000, 1)
-        .requiredFloatParameter(Labels.from(LATITUDE_CENTER, "Latitude", "The latitude value"))
-        .requiredFloatParameter(Labels.from(LONGITUDE_CENTER, "Longitude", "The longitude value"))
-        .build();
-  }
-
-  @Override
-  public void onInvocation(ProcessorParams parameters,
-                           SpOutputCollector spOutputCollector,
-                           EventProcessorRuntimeContext runtimeContext) throws SpRuntimeException {
-    this.centerLatitude = parameters.extractor().singleValueParameter(LATITUDE_CENTER, Float.class);
-    this.centerLongitude = parameters.extractor().singleValueParameter(LONGITUDE_CENTER, Float.class);
-    this.latitudeFieldName = parameters.extractor().mappingPropertyValue("latitude-field");
-    this.longitudeFieldName = parameters.extractor().mappingPropertyValue("longitude-field");
-    this.radius = parameters.extractor().singleValueParameter("radius", Integer.class);
-  }
-
-  @Override
-  public void onEvent(Event event, SpOutputCollector collector) throws SpRuntimeException {
-
-  }
-
-  @Override
-  public void onDetach() throws SpRuntimeException {
-
-  }
-}
-```
-
-## Adding an implementation
-
-Everything we need to do now is to add an implementation.
-
-Add the following code to your class, which realizes the geofencing functionality:
-
-```java
-  @Override
-  public void onEvent(Event event, SpOutputCollector collector) throws SpRuntimeException {
-    float latitude = event.getFieldBySelector(latitudeFieldName).getAsPrimitive().getAsFloat();
-    float longitude = event.getFieldBySelector(longitudeFieldName).getAsPrimitive().getAsFloat();
-
-    float distance = distFrom(latitude, longitude, centerLatitude, centerLongitude);
-
-    if (distance <= radius) {
-      collector.collect(event);
-    }
-  }
-
-  public static float distFrom(float lat1, float lng1, float lat2, float lng2) {
-    double earthRadius = 6371000;
-    double dLat = Math.toRadians(lat2 - lat1);
-    double dLng = Math.toRadians(lng2 - lng1);
-    double a = Math.sin(dLat / 2) * Math.sin(dLat / 2) +
-        Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
-            Math.sin(dLng / 2) * Math.sin(dLng / 2);
-    double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
-    return (float) (earthRadius * c);
-  }
-```
-
-We won't go into details here as this isn't StreamPipes-related code, but in short, the `onEvent` method extracts the latitude and longitude fields from the input event and calculates the distance between these coordinates and the geofence center using the Haversine formula.
-If the distance is within the given radius, the event is forwarded to the next pipeline element.
-
-See the [event model](06_extend-sdk-event-model.md) guide to learn how to extract fields from events.
-
-## Registering the pipeline element
-The final step is to register the data processor in the `Init` class. Add the following line to the `SpServiceDefinitionBuilder`:
-
-```java
-    .registerPipelineElement(new GeofencingProcessor())
-```
-
-## Starting the service
-
-:::tip
-Once you start the service, it will register in StreamPipes under its hostname. The hostname is auto-discovered and should work out of the box.
-In some cases, the detected hostname is not resolvable from within a container (where the core is running). In this case, provide an `SP_HOST` environment variable to override the auto-discovery.
-:::
-
-:::tip
-The default port of all pipeline element services, as defined in the `create` method, is 8090.
-If you'd like to run multiple services at the same time on your development machine, change the port here. As an alternative, you can provide an env variable `SP_PORT` which overrides the port setting. This is useful for using different configs for dev and prod environments.
-:::
-
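-Before starting, double-check that the processor is registered in the service definition of the `Init` class. As a rough sketch (the service ID, names and the set of messaging factories below are archetype-style placeholders, not fixed values; adapt them to your project):
-
-```java
-@Override
-public SpServiceDefinition provideServiceDefinition() {
-  return SpServiceDefinitionBuilder.create("org.apache.streampipes",
-          "human-readable service name",
-          "human-readable service description", 8090)
-      // the data processor created in this tutorial
-      .registerPipelineElement(new GeofencingProcessor())
-      // messaging formats and protocols supported by the service
-      .registerMessagingFormats(new JsonDataFormatFactory())
-      .registerMessagingProtocols(new SpKafkaProtocolFactory())
-      .build();
-}
-```
-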
-Now we are ready to start our service!
-
-Configure your IDE to provide an environment variable called ``SP_DEBUG`` with value ``true`` when starting the project.
-
-Execute the main method in the class `Init` we've just created.
-
-The service automatically registers itself in StreamPipes.
-To install the newly created element, open the StreamPipes UI and follow the manual provided in the [user guide](03_use-install-pipeline-elements.md).
-
-## Read more
-
-Congratulations! You've just created your first data processor for StreamPipes.
-There are many more things to explore and data processors can be defined in much more detail using multiple wrappers.
-Follow our [SDK guide](06_extend-sdk-static-properties.md) to see what's possible!
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-sinks.md b/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-sinks.md
deleted file mode 100644
index a2bf82c6f..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-sinks.md
+++ /dev/null
@@ -1,231 +0,0 @@
----
-id: extend-tutorial-data-sinks
-title: "Tutorial: Data Sinks"
-sidebar_label: "Tutorial: Data Sinks"
-original_id: extend-tutorial-data-sinks
----
-
-In this tutorial, we will add a new data sink using the standalone wrapper.
-
-From an architectural point of view, we will create a self-contained service that includes the description of the data sink and a corresponding implementation.
-
-## Objective
-
-We are going to create a new data sink that calls an external HTTP endpoint to forward data to an external service.
-
-For each incoming event, an external service is invoked using an HTTP POST request. In this example, we'll call an endpoint provided by [RequestBin](https://requestbin.com/).
-To set up your own endpoint, go to [https://requestbin.com/](https://requestbin.com/) and click "Create a request bin". Copy the URL of the newly created endpoint.
-
-
-## Project setup
-
-Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project skeleton (streampipes-archetype-extensions-jvm).
-Enter the following command in a command line of your choice (Apache Maven needs to be installed):
-
-```
-mvn archetype:generate -DarchetypeGroupId=org.apache.streampipes \
--DarchetypeArtifactId=streampipes-archetype-extensions-jvm -DarchetypeVersion=0.70.0 \
--DgroupId=org.streampipes.tutorial -DartifactId=sink-tutorial -DclassNamePrefix=Rest -DpackageName=mypackage
-```
-
-You will see a project structure similar to the structure shown in the [archetypes](06_extend-archetypes.md) section.
-
-:::tip
-Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.
-:::
-
-Now you're ready to create your first data sink for StreamPipes!
-
-## Adding data sink requirements
-
-First, we will add a new stream requirement.
-Create a class `RestSink` which should look as follows:
-
-```java
-package org.apache.streampipes.pe.example;
-
-import org.apache.streampipes.commons.exceptions.SpRuntimeException;
-import org.apache.streampipes.model.DataSinkType;
-import org.apache.streampipes.model.graph.DataSinkDescription;
-import org.apache.streampipes.model.runtime.Event;
-import org.apache.streampipes.model.schema.PropertyScope;
-import org.apache.streampipes.sdk.builder.DataSinkBuilder;
-import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
-import org.apache.streampipes.sdk.helpers.EpRequirements;
-import org.apache.streampipes.sdk.helpers.Labels;
-import org.apache.streampipes.sdk.helpers.Locales;
-import org.apache.streampipes.sdk.utils.Assets;
-import org.apache.streampipes.wrapper.context.EventSinkRuntimeContext;
-import org.apache.streampipes.wrapper.standalone.SinkParams;
-import org.apache.streampipes.wrapper.standalone.StreamPipesDataSink;
-
-public class RestSink extends StreamPipesDataSink {
-
-  @Override
-  public DataSinkDescription declareModel() {
-    return DataSinkBuilder.create("org.apache.streampipes.tutorial.pe.sink.rest")
-        .category(DataSinkType.NOTIFICATION)
-        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
-        .withLocales(Locales.EN)
-        .requiredStream(StreamRequirementsBuilder
-            .create()
-            .requiredPropertyWithNaryMapping(EpRequirements.anyProperty(), Labels.withId(
-                "fields-to-send"), PropertyScope.NONE)
-            .build())
-        .build();
-  }
-
-  @Override
-  public void onInvocation(SinkParams parameters, EventSinkRuntimeContext runtimeContext) throws SpRuntimeException {
-
-  }
-
-  @Override
-  public void onEvent(Event event) throws SpRuntimeException {
-
-  }
-
-  @Override
-  public void onDetach() throws SpRuntimeException {
-
-  }
-}
-```
-
-In this class, we need to implement three methods: The `declareModel` method is used to define abstract stream requirements such as event properties that must be present in any input stream that is later connected to the element using the StreamPipes UI.
-The second method, `onInvocation`, is called once a pipeline using this sink is started. The third method, `onEvent`, is called for every incoming event.
-
-The ``declareModel`` method describes the properties of our data sink:
-* ``category`` defines a category for this sink.
-* ``withAssets`` denotes that we will provide an external documentation file and an icon, which can be found in the ``resources`` folder.
-* ``withLocales`` defines that we will provide an external language file, also available in the ``resources`` folder.
-* ``requiredStream`` defines requirements any input stream connected to this sink must provide. In this case, we do not have any specific requirements; we just forward all incoming events to the REST sink. However, we want to show users a list of available fields from the connected input event, from which they can select a subset. We achieve this by defining a mapping from the empty requirement, which will later render a selection dialog in the pipeline editor.
-
-The ``onInvocation`` method is called when a pipeline containing the sink is started. Once a pipeline is started, we would like to extract user-defined parameters.
-In this example, we simply extract the fields selected by users that should be forwarded to the REST sink.
-
-## Pipeline element invocation
-
-Once users start a pipeline that uses our data sink, the _onInvocation_ method in our class is called. The class `SinkParams` provides access to the configuration parameters a user has selected in the pipeline editor and information on the actual streams that are connected to the pipeline element.
-
-
-## Adding an implementation
-
-Now we'll add a proper implementation (i.e., the REST call executed for every incoming event).
-
-Our final class should look as follows:
-
-```java
-package org.apache.streampipes.pe.example;
-
-import com.google.common.base.Charsets;
-import org.apache.http.client.fluent.Request;
-import org.apache.http.entity.StringEntity;
-import org.apache.streampipes.commons.exceptions.SpRuntimeException;
-import org.apache.streampipes.dataformat.SpDataFormatDefinition;
-import org.apache.streampipes.dataformat.json.JsonDataFormatDefinition;
-import org.apache.streampipes.model.DataSinkType;
-import org.apache.streampipes.model.graph.DataSinkDescription;
-import org.apache.streampipes.model.runtime.Event;
-import org.apache.streampipes.model.schema.PropertyScope;
-import org.apache.streampipes.sdk.builder.DataSinkBuilder;
-import org.apache.streampipes.sdk.builder.StreamRequirementsBuilder;
-import org.apache.streampipes.sdk.helpers.EpRequirements;
-import org.apache.streampipes.sdk.helpers.Labels;
-import org.apache.streampipes.sdk.helpers.Locales;
-import org.apache.streampipes.sdk.utils.Assets;
-import org.apache.streampipes.wrapper.context.EventSinkRuntimeContext;
-import org.apache.streampipes.wrapper.standalone.SinkParams;
-import org.apache.streampipes.wrapper.standalone.StreamPipesDataSink;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-public class RestSink extends StreamPipesDataSink {
-
-  private static final Logger LOG = LoggerFactory.getLogger(RestSink.class);
-
-  private static final String REST_ENDPOINT_URI = "YOUR_REQUEST_BIN_URL";
-  private List<String> fieldsToSend;
-  private SpDataFormatDefinition dataFormatDefinition;
-
-  @Override
-  public DataSinkDescription declareModel() {
-    ...
-  }
-
-  @Override
-  public void onInvocation(SinkParams parameters, EventSinkRuntimeContext runtimeContext) throws SpRuntimeException {
-    this.dataFormatDefinition = new JsonDataFormatDefinition();
-    this.fieldsToSend = parameters.extractor().mappingPropertyValues("fields-to-send");
-  }
-
-  @Override
-  public void onEvent(Event event) throws SpRuntimeException {
-    Map<String, Object> outEventMap = event.getSubset(fieldsToSend).getRaw();
-    try {
-      String json = new String(dataFormatDefinition.fromMap(outEventMap));
-      Request.Post(REST_ENDPOINT_URI).body(new StringEntity(json, Charsets.UTF_8)).execute();
-    } catch (SpRuntimeException e) {
-      LOG.error("Could not parse incoming event");
-    } catch (IOException e) {
-      LOG.error("Could not reach endpoint at {}", REST_ENDPOINT_URI);
-    }
-  }
-
-  @Override
-  public void onDetach() throws SpRuntimeException {
-
-  }
-}
-```
-
-The only class variable you need to change right now is ``REST_ENDPOINT_URI``. Change this URL to the one provided by your request bin.
-In the ``onEvent`` method, we use a helper method to get a subset of the incoming event.
-Finally, we convert the resulting ``Map`` to a JSON string and call the endpoint.
-
-
-## Preparing the service
-The final step is to register the sink as a pipeline element.
- -Go to the class `Init` and register the sink: -```java -.registerPipelineElement(new RestSink()) -``` - -## Starting the service -
-
-:::tip
-Once you start the service, it will register in StreamPipes under its hostname. The hostname is auto-discovered and should work out of the box.
-In some cases, the detected hostname is not resolvable from within a container (where the core is running). In this case, provide an `SP_HOST` environment variable to override the auto-discovery.
-:::
-
-:::tip
-The default port of all pipeline element services, as defined in the `create` method, is 8090.
-If you'd like to run multiple services at the same time on your development machine, change the port here. As an alternative, you can provide an env variable `SP_PORT` which overrides the port setting. This is useful for using different configs for dev and prod environments.
-:::
-
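-Before starting, make sure the sink is part of the service definition in the `Init` class. A minimal sketch (service ID, names and messaging factories are archetype-style placeholders; adapt them to your project):
-
-```java
-@Override
-public SpServiceDefinition provideServiceDefinition() {
-  return SpServiceDefinitionBuilder.create("org.apache.streampipes",
-          "human-readable service name",
-          "human-readable service description", 8090)
-      // the data sink created in this tutorial
-      .registerPipelineElement(new RestSink())
-      // messaging formats and protocols supported by the service
-      .registerMessagingFormats(new JsonDataFormatFactory())
-      .registerMessagingProtocols(new SpKafkaProtocolFactory())
-      .build();
-}
-```
-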
-Now we are ready to start our service!
-
-Configure your IDE to provide an environment variable called ``SP_DEBUG`` with value ``true`` when starting the project.
-
-Execute the main method in the class `Init` we've just created. The service automatically registers itself in StreamPipes.
-
-To install the created element, open the StreamPipes UI and follow the manual provided in the [user guide](03_use-install-pipeline-elements.md).
-
-## Read more
-
-Congratulations! You've just created your first data sink for StreamPipes.
-There are many more things to explore and data sinks can be defined in much more detail using multiple wrappers.
-Follow our [SDK guide](../dev-guide-sdk-guide-sinks) to see what's possible!
diff --git a/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-sources.md b/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-sources.md
deleted file mode 100644
index a2d95f094..000000000
--- a/website-v2/versioned_docs/version-0.70.0/06_extend-tutorial-data-sources.md
+++ /dev/null
@@ -1,214 +0,0 @@
----
-id: extend-tutorial-data-sources
-title: "Tutorial: Data Sources"
-sidebar_label: "Tutorial: Data Sources"
-original_id: extend-tutorial-data-sources
----
-
-In this tutorial, we will add a new data source consisting of a single data stream. The source will be provided as a standalone component (i.e., the description will be accessible through an integrated web server).
-
-## Objective
-
-We are going to create a new data stream that is produced by a GPS sensor installed in a delivery vehicle.
-The sensor produces a continuous stream of events that contain the current timestamp, the current lat/lng position of the vehicle and the plate number of the vehicle.
-Events are published in a JSON format as follows:
-```json
-{
-  "timestamp" : 145838399,
-  "latitude" : 37.04,
-  "longitude" : 17.04,
-  "plateNumber" : "KA-AB 123"
-}
-```
-
-These events are published to a Kafka broker using the topic `org.streampipes.tutorial.vehicle`.
-
-In the following section, we show how to describe this stream in a form that allows you to import and use it in StreamPipes.
-
-## Project setup
-
-Instead of creating a new project from scratch, we recommend using the Maven archetype to create a new project skeleton (streampipes-archetype-extensions-jvm).
-Enter the following command in a command line of your choice (Apache Maven needs to be installed):
-
-```
-mvn archetype:generate \
--DarchetypeGroupId=org.apache.streampipes -DarchetypeArtifactId=streampipes-archetype-extensions-jvm \
--DarchetypeVersion=0.70.0 -DgroupId=my.groupId \
--DartifactId=my-source -DclassNamePrefix=MySource -DpackageName=mypackagename
-```
-
-You will see a project structure similar to the structure shown in the [archetypes](06_extend-archetypes.md) section.
-
-:::tip
-Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.
-:::
-
-## Adding a data stream description
-
-Now we will add a new data stream definition.
-First, create a new class `MyVehicleStream` which should look as follows:
-
-```java
-package org.apache.streampipes.pe.example;
-
-import org.apache.streampipes.model.SpDataStream;
-import org.apache.streampipes.sources.AbstractAdapterIncludedStream;
-
-public class MyVehicleStream extends AbstractAdapterIncludedStream {
-
-  @Override
-  public SpDataStream declareModel() {
-    return null;
-  }
-
-  @Override
-  public void executeStream() {
-
-  }
-}
-```
-
-This class extends the class ``AbstractAdapterIncludedStream``, which indicates that this source continuously produces data (configured in the ``executeStream()`` method).
-In contrast, the class `AbstractAlreadyExistingStream` indicates that we only want to describe an already existing stream (e.g., a stream that already sends data to an existing Kafka broker).
-
-Next, we will add the definition of the data stream. Add the following code inside of the `declareModel` method:
-```java
-return DataStreamBuilder.create("org.streampipes.tutorial.vehicle.position", "Vehicle Position", "An event stream " +
-    "that produces current vehicle positions")
-```
-
-This line creates a new instance of the SDK's `DataStreamBuilder` by providing three basic parameters:
-The first parameter must be a unique identifier of your data stream.
-The second and third parameters indicate a label and a description of your stream.
-These values will later be used in the StreamPipes UI to display stream details in a human-readable manner.
-
-Next, we will add the properties as stated above to the stream definition by adding the following lines:
-```java
-.property(EpProperties.timestampProperty("timestamp"))
-.property(EpProperties.stringEp(Labels.from("plate-number", "Plate Number", "Denotes the plate number of the vehicle"), "plateNumber", "http://my.company/plateNumber"))
-.property(EpProperties.doubleEp(Labels.from("latitude", "Latitude", "Denotes the latitude value of the vehicle's position"), "latitude", Geo.lat))
-.property(EpProperties.doubleEp(Labels.from("longitude", "Longitude", "Denotes the longitude value of the vehicle's position"), "longitude", Geo.lng))
-```
-These four _event properties_ compose our _event schema_. An event property must, at least, provide the following attributes:
-
-* **Runtime Name**. The runtime name indicates the key of the property at runtime, e.g., if our JSON message contains a structure such as `{"plateNumber" : "KA-F 123"}`, the runtime name must be `plateNumber`.
-* **Runtime Type**. An event property must have a primitive type (we will later see how to model more complex properties such as lists and nested properties).
-  The type must be an instance of `XMLSchema` primitives; the SDK provides convenience methods to set the property type.
-* **Domain Property**. The domain property indicates the semantics of the event property. For instance, the `latitude` property is linked to the `http://www.w3.org/2003/01/geo/wgs84_pos#lat` property of the WGS84 vocabulary.
-  The domain property should be a URI that is part of an existing or domain-specific vocabulary. The SDK provides convenience methods for popular vocabularies (e.g., Schema.org, Dolce or WGS84).
-
-In order to complete the minimum required specification of an event stream, we need to provide information on the transport format and protocol of the data stream at runtime.
-
-This can be achieved by extending the builder with the respective properties:
-```java
-.format(Formats.jsonFormat())
-.protocol(Protocols.kafka("localhost", 9094, "TOPIC_SHOULD_BE_CHANGED"))
-.build();
-```
-
-Set ``org.streampipes.tutorial.vehicle`` as your new topic by replacing the term ``TOPIC_SHOULD_BE_CHANGED``.
-
-In this example, we defined that the data stream consists of events in a JSON format and that Kafka is used as a message broker to transmit events.
-The last ``build()`` method call triggers the construction of the data stream definition.
-
-That's it! In the next section, we will add a simple simulator that produces data for this stream.
-
-## Creating some dummy data
-
-Let's assume our stream should produce some random values that are sent to StreamPipes. We'll add a very simple data simulator to the ``executeStream`` method as follows:
-
-```java
-@Override
-  public void executeStream() {
-
-    SpKafkaProducer producer = new SpKafkaProducer("localhost:9094", "my-topic", Collections.emptyList());
-    Random random = new Random();
-    Runnable runnable = () -> {
-      for (;;) {
-        JsonObject jsonObject = new JsonObject();
-        jsonObject.addProperty("timestamp", System.currentTimeMillis());
-        jsonObject.addProperty("plateNumber", "KA-FZ 1");
-        jsonObject.addProperty("latitude", random.nextDouble());
-        jsonObject.addProperty("longitude", random.nextDouble());
-
-        producer.publish(jsonObject.toString());
-
-        try {
-          TimeUnit.SECONDS.sleep(1);
-        } catch (InterruptedException e) {
-          e.printStackTrace();
-        }
-
-      }
-    };
-
-    new Thread(runnable).start();
-  }
-```
-
-Change the topic and the URL of your Kafka broker (as stated in the stream definition above).
-
-## Registering the data stream
-
-You need to register the stream in the service definition. Open the ``Init`` class and register the ``MyVehicleStream``:
-
-```java
-  @Override
-  public SpServiceDefinition provideServiceDefinition() {
-    return SpServiceDefinitionBuilder.create("org.apache.streampipes",
-            "human-readable service name",
-            "human-readable service description", 8090)
-        .registerPipelineElement(new ExampleDataProcessor())
-        .registerPipelineElement(new ExampleDataSink())
-        .registerPipelineElement(new MyVehicleStream())
-        .registerMessagingFormats(
-            new JsonDataFormatFactory(),
-            new CborDataFormatFactory(),
-            new SmileDataFormatFactory(),
-            new FstDataFormatFactory())
-        .registerMessagingProtocols(
-            new SpKafkaProtocolFactory(),
-            new SpJmsProtocolFactory(),
-            new SpMqttProtocolFactory())
-        .build();
-  }
-```
-
-You can remove the other two example classes if you want.
-
-## Starting the service
-
-
-:::tip
-Once you start the service, it will register in StreamPipes under its hostname. The hostname is auto-discovered and should work out of the box.
-In some cases, the detected hostname is not resolvable from within a container (where the core is running). In this case, provide an `SP_HOST` environment variable to override the auto-discovery.
-:::
-
-Now we are ready to start our first container!
-
-Configure your IDE to provide an environment variable called ``SP_DEBUG`` with value ``true`` when starting the project.
-
-Execute the main method in the class `Init`, open a web browser and navigate to http://localhost:8090 (or change the port according to the value of the ``SP_PORT`` variable in the env file).
-
-You should see something as follows:
-
-Pipeline Element Container Overview
-
-Click on the link of the data source to see the generated description of the pipeline element.
-
-Pipeline Element description
-
-The container automatically registers itself in StreamPipes.
-
-To install the newly created element, open the StreamPipes UI and install the source via the ``Install Pipeline Elements`` section.
-
-## Read more
-
-Congratulations! You've just created your first pipeline element for StreamPipes.
-There are many more things to explore and data sources can be defined in much more detail.
diff --git a/website-v2/versioned_docs/version-0.70.0/07_technicals-architecture.md b/website-v2/versioned_docs/version-0.70.0/07_technicals-architecture.md
deleted file mode 100644
index 4ef1a54f4..000000000
--- a/website-v2/versioned_docs/version-0.70.0/07_technicals-architecture.md
+++ /dev/null
@@ -1,63 +0,0 @@
----
-id: technicals-architecture
-title: Architecture
-sidebar_label: Architecture
-original_id: technicals-architecture
----
-
-
-The following picture illustrates the high-level architecture of StreamPipes:
-
-High Level Architecture of StreamPipes
-
-Users mainly interact (besides other UI components) with the _Pipeline Editor_ to create stream processing pipelines based on data streams, data processors and data sinks.
-These reusable pipeline elements are provided by self-contained _pipeline element containers_, each of them having a semantic description that specifies their characteristics (e.g., input, output and required user input for data processors).
-Each pipeline element container has a REST endpoint that provides these characteristics as a JSON-LD document.
-
-Pipeline element containers are built using one of several provided _wrappers_.
-Wrappers abstract from the underlying runtime stream processing framework.
-Currently, the StreamPipes framework provides wrappers for Apache Flink, Esper and algorithms running directly on the JVM.
-
-The _pipeline manager_ manages the definition and execution of pipelines.
-When creating pipelines, the manager continuously matches the pipeline against its semantic description and provides user guidance in the form of recommendations.
-Once a pipeline is started, the pipeline manager invokes the corresponding pipeline element containers.
-The container prepares the actual execution logic and submits the program to the underlying execution engine, e.g., the program is deployed in the Apache Flink cluster.
-
-Pipeline elements exchange data using one or more message brokers and protocols (e.g., Kafka or MQTT).
-StreamPipes does not rely on a specific broker or message format, but negotiates suitable brokers based on the capabilities of connected pipeline elements.
-
-Thus, StreamPipes provides a higher-level abstraction of existing stream processing technology by enabling domain experts to create streaming analytics pipelines in a self-service manner.
-
-## Semantic description
-Pipeline elements in StreamPipes are meant to be reusable:
-
-* Data processors and data sinks are generic (or domain-specific) elements that express their requirements and are able to operate on any stream that satisfies these requirements.
-* Data processors and data sinks can be manually configured by offering possible configuration parameters which users can individually define when creating pipelines.
-* Data streams can be connected to any data processor or data sink that matches the capabilities of the stream.
-
-When users create pipelines by connecting a data stream with a data processor (or further processors), the pipeline manager _matches_ the input stream of a data processor against its requirements.
-This matching is performed based on the _semantic description_ of each element.
-The semantic description (technically an RDF graph serialized as JSON-LD) can be best understood by seeing it as an envelope around a pipeline element.
-It only provides metadata information, while we don't rely on any RDF at runtime for exchanging events between pipeline elements.
-While RDF-based metadata ensures a good understanding of stream capabilities, lightweight event formats at runtime (such as JSON or Thrift) ensure fast processing of events.
-
-Let's look at an example stream that produces a continuous stream of vehicle positions as illustrated below:
-
-Semantic description of data streams
-
-While the runtime layer produces plain JSON by submitting actual values of the position and the vehicle's plate number, the description layer describes various characteristics of the stream:
-For instance, it defines the event schema (including, besides the data type and the runtime name of each property, a more fine-grained meaning of the property), quality aspects (e.g., the measurement unit of a property or the frequency) and the grounding (e.g., the format used at runtime and the communication protocol used for transmitting events).
-
-The same applies to data processors and data sinks:
-
-Semantic description of data processor
-
-Data processors (and, with some differences, data sinks) are annotated by providing metadata information on their required input and output.
-For instance, we can define minimum schema requirements (such as geospatial coordinates that need to be provided by any stream that is connected to a processor), but also required (minimum or maximum) quality levels and supported transport protocols and formats.
-In addition, required configuration parameters users can define during the pipeline definition process are provided by the semantic description.
-
-Once new pipeline elements are imported into StreamPipes, we store all information provided by the description layer in a central repository and use this information to guide users through the pipeline definition process.
-
-Don't worry - you will never be required to model RDF by yourself.
-Our SDK provides convenience methods that help creating the description automatically.
-
diff --git a/website-v2/versioned_docs/version-0.70.0/07_technicals-configuration.md b/website-v2/versioned_docs/version-0.70.0/07_technicals-configuration.md
deleted file mode 100644
index 459909ac6..000000000
--- a/website-v2/versioned_docs/version-0.70.0/07_technicals-configuration.md
+++ /dev/null
@@ -1,59 +0,0 @@
----
-id: technicals-configuration
-title: Configuration
-sidebar_label: Configuration
-original_id: technicals-configuration
----
-
-On this page we explain how the StreamPipes configuration works.
-StreamPipes allows the individual services (pipeline element containers and third-party services) to store configuration parameters in a distributed key-value store.
-This has the advantage that individual services do not need to store any configurations on the local file system, enabling us to run containers anywhere.
-As a key-value store we use [Consul](https://www.consul.io/), which is an essential service for all our services.
-
-Semantic description of data processor
-
-
-## Edit Configurations
-All services in StreamPipes can have configuration parameters.
-You can either change them in the Consul user interface (which is by default running on port 8500) or directly on the StreamPipes Configurations page.
-Once a new pipeline element container is started, it is registered in Consul and the parameters can be edited in the configuration page, as shown below.
-To store changes in Consul, the update button must be clicked.
-
-
-
-## Configuration for Developers
-We provide a Configurations API for the use of configuration parameters in your services.
-Each processing element project has a “config” package [[Example]](https://github.com/apache/streampipes-extensions/tree/dev/streampipes-sinks-internal-jvm/src/main/java/org/streampipes/sinks/internal/jvm/config).
-This package usually contains two classes:
-one containing unique keys for the configuration values and one containing the getter and setter methods to access these values.
-For the naming of configuration keys, we recommend using “SP” as a prefix.
-As we explain later, it is possible to set default configurations as environment variables; this prefix makes them unique on your server.
-A configuration entry needs a unique config key. For this key, a value can be specified containing the configuration, like for example the port number of the service.
-For each configuration, a description explaining the parameter can be provided; furthermore, the data type must be specified, as well as whether it is a password or not.
-Below, the schema of a configuration item is shown on the left and an example of a port configuration on the right.
-
-Semantic description of data processor
-
-As a developer, you can add as many new configurations to services as you wish, but there are some that are required for all processing element containers.
-Those are **the host**, **the port**, and **the name** of the service.
-
-## Default Values
-You can provide default values for the configurations, which are used when a configuration is read for the first time.
-The first option is to register a configuration parameter in the Config class.
-This is a fallback value, which is used if nothing else is defined.
-Since this value is static, we offer a second option.
-It is possible to provide a default value by setting an environment variable.
-In this case, the convention is that the key of a configuration parameter must be used as the environment variable.
-Now, this value is used instead of the value defined in the Config class.
-During development, the configuration values often need to be changed for debugging purposes, therefore we provide an .env file in all processing element projects and archetypes.
-This file can be used by your IDE to set the environment variables. (e.g., [Intellij Plugin](https://plugins.jetbrains.com/plugin/7861-envfile))
-When you need to change a variable at runtime, you can do this in the StreamPipes configurations as explained before.
-Those changes take effect immediately, without the need of a container restart.
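-
-To illustrate the naming convention, a hypothetical keys class could look like the following sketch (the key names are invented for this example; real projects define their own):
-
-```java
-public class ConfigKeys {
-  // the "SP" prefix keeps keys unique when they are set as environment variables
-  public static final String HOST = "SP_MY_SERVICE_HOST";
-  public static final String PORT = "SP_MY_SERVICE_PORT";
-  public static final String SERVICE_NAME = "SP_MY_SERVICE_NAME";
-}
-```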
-
-:::caution Installed pipeline elements
-Be cautious: when the configuration is used in the semantic description of a processing element which is already installed in StreamPipes, you have to reload this element in StreamPipes (My Elements -> Reload).
-In addition, changes might affect already running pipelines.
-:::
diff --git a/website-v2/versioned_docs/version-0.70.0/07_technicals-messaging.md b/website-v2/versioned_docs/version-0.70.0/07_technicals-messaging.md deleted file mode 100644 index 64d9a2ef8..000000000 --- a/website-v2/versioned_docs/version-0.70.0/07_technicals-messaging.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: technicals-messaging -title: Messaging -sidebar_label: Messaging -original_id: technicals-messaging ---- - -tbd \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.70.0/07_technicals-runtime-wrappers.md b/website-v2/versioned_docs/version-0.70.0/07_technicals-runtime-wrappers.md deleted file mode 100644 index dedc3ee18..000000000 --- a/website-v2/versioned_docs/version-0.70.0/07_technicals-runtime-wrappers.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: technicals-runtime-wrappers -title: Runtime Wrappers -sidebar_label: Runtime Wrappers -original_id: technicals-runtime-wrappers ---- - -tbd \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.70.0/07_technicals-user-guidance.md b/website-v2/versioned_docs/version-0.70.0/07_technicals-user-guidance.md deleted file mode 100644 index 0141dabc1..000000000 --- a/website-v2/versioned_docs/version-0.70.0/07_technicals-user-guidance.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: technicals-user-guidance -title: User Guidance -sidebar_label: User Guidance -original_id: technicals-user-guidance ---- - -tbd \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.70.0/08_debugging.md b/website-v2/versioned_docs/version-0.70.0/08_debugging.md deleted file mode 100644 index 33bedad41..000000000 --- a/website-v2/versioned_docs/version-0.70.0/08_debugging.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: debugging-debugging -title: Debugging -sidebar_label: Debugging -original_id: debugging-debugging ---- - -tbd \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.70.0/08_monitoring.md b/website-v2/versioned_docs/version-0.70.0/08_monitoring.md deleted file mode 100644 index 0712b98e9..000000000 --- a/website-v2/versioned_docs/version-0.70.0/08_monitoring.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -id: debugging-monitoring -title: Monitoring -sidebar_label: Monitoring -original_id: debugging-monitoring ---- - -tbd \ No newline at end of file diff --git a/website-v2/versioned_docs/version-0.70.0/09_contribute.md b/website-v2/versioned_docs/version-0.70.0/09_contribute.md deleted file mode 100644 index 43ea625f3..000000000 --- a/website-v2/versioned_docs/version-0.70.0/09_contribute.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -id: community-contribute -title: Contribute -sidebar_label: Contribute -original_id: community-contribute ---- - -## Contribute - -We welcome contributions to StreamPipes. If you are interested in contributing to StreamPipes, let us know! You'll -get to know an open-minded and motivated team working together to build the next IIoT analytics toolbox. - -Here are some first steps in case you want to contribute: -* Subscribe to our dev mailing list [dev-subscribe@streampipes.apache.org](mailto:dev-subscribe@streampipes.apache.org) -* Send an email, tell us about your interests and which parts of Streampipes you'd like to contribute (e.g., core or UI)! 
-* Ask for a mentor who helps you to understand the code base and guides you through the first setup steps
-* Find an issue on [GitHub](https://github.com/apache/streampipes/issues) which is tagged with a _good first issue_ tag
-* Have a look at our **developer wiki** at [https://cwiki.apache.org/confluence/display/STREAMPIPES](https://cwiki.apache.org/confluence/display/STREAMPIPES) to learn more about StreamPipes development.
diff --git a/website-v2/versioned_docs/version-0.70.0/09_get-help.md b/website-v2/versioned_docs/version-0.70.0/09_get-help.md
deleted file mode 100644
index 0e564825f..000000000
--- a/website-v2/versioned_docs/version-0.70.0/09_get-help.md
+++ /dev/null
@@ -1,26 +0,0 @@
----
-id: community-get-help
-title: Get Help
-sidebar_label: Get Help
-original_id: community-get-help
----
-
-The Apache StreamPipes community is happy to help with any questions or problems you might have.
-
-## Questions
-Subscribe to our user mailing list to ask a question.
-
-[Mailing Lists](https://streampipes.apache.org/mailinglists.html)
-
-To subscribe to the user list, send an email to [users-subscribe@streampipes.apache.org](mailto:users-subscribe@streampipes.apache.org)
-
-You can also ask questions on our GitHub Discussions page:
-[GitHub Discussions](https://github.com/apache/streampipes/discussions)
-
-## Bugs and Feature Requests
-
-If you've found a bug or have a feature that you'd love to see in StreamPipes, feel free to create an issue on [GitHub](https://github.com/apache/streampipes/issues)
-or [discuss your ideas](https://github.com/apache/streampipes/discussions/categories/ideas).
-
-
-
diff --git a/website-v2/versioned_docs/version-0.70.0/dev-guide-archetype.md b/website-v2/versioned_docs/version-0.70.0/dev-guide-archetype.md
deleted file mode 100644
index 6b2486911..000000000
--- a/website-v2/versioned_docs/version-0.70.0/dev-guide-archetype.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-id: dev-guide-archetype
-title: Start Developing
-sidebar_label: Start Developing
-original_id: dev-guide-archetype
----
-
diff --git a/website-v2/versioned_docs/version-0.70.0/dev-guide-processor-sdk.md b/website-v2/versioned_docs/version-0.70.0/dev-guide-processor-sdk.md
deleted file mode 100644
index 5ceca4bd7..000000000
--- a/website-v2/versioned_docs/version-0.70.0/dev-guide-processor-sdk.md
+++ /dev/null
@@ -1,12 +0,0 @@
----
-id: dev-guide-sdk-guide-processors
-title: "SDK Guide: Data Processors"
-sidebar_label: "SDK Guide: Data Processors"
-original_id: dev-guide-sdk-guide-processors
----
-
-## Project Setup
-(coming soon, please check the [tutorial](../dev-guide-tutorial-processors) to learn how to define data processors)
-
-## SDK reference
-The complete SDK reference for defining data processors will follow soon. Please check the SDK's Javadoc for now!
diff --git a/website-v2/versioned_docs/version-0.70.0/dev-guide-sink-sdk.md b/website-v2/versioned_docs/version-0.70.0/dev-guide-sink-sdk.md
deleted file mode 100644
index d2e253441..000000000
--- a/website-v2/versioned_docs/version-0.70.0/dev-guide-sink-sdk.md
+++ /dev/null
@@ -1,12 +0,0 @@
----
-id: dev-guide-sdk-guide-sinks
-title: "SDK Guide: Data Sinks"
-sidebar_label: "SDK Guide: Data Sinks"
-original_id: dev-guide-sdk-guide-sinks
----
-
-## Project Setup
-(coming soon, please check the [tutorial](../dev-guide-tutorial-processors) to learn how to define sinks)
-
-## SDK reference
-The complete SDK reference for defining data sinks will follow soon. Please check the SDK's Javadoc for now!
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.70.0/faq-common-problems.md b/website-v2/versioned_docs/version-0.70.0/faq-common-problems.md
deleted file mode 100644
index c7a61f147..000000000
--- a/website-v2/versioned_docs/version-0.70.0/faq-common-problems.md
+++ /dev/null
@@ -1,74 +0,0 @@
----
-id: faq-common-problems
-title: Common Problems
-sidebar_label: Common Problems
-original_id: faq-common-problems
----
-
-* Windows 10: Consul, Kafka, Zookeeper, or Kafka-Rest did not start
-* Linux / OSX: Consul does not start
-* Run StreamPipes in a VM in Windows
-* Only a few processors are available in the pipeline editor
-* No data is shown in the live dashboard
-* Windows 10: Should I use Windows containers or Docker containers?
-* Configurations are not deleted
-* Help us to improve StreamPipes and this documentation
-* Docker Network already used
-
-## Windows 10: Consul, Kafka, Zookeeper, or Kafka-Rest did not start
-**Problem:** You get an error message similar to: `ERROR: for consul Cannot start service consul: b'driver failed programming external connectivity on endpoint sp-test_consul_1 (eae0457fc03c1364b8e81a6e155ca4b95ee1e1d01bb3c1aa9dd5192bdcb7b91a): Error starting userland proxy: mkdir /port/tcp:0.0.0.0:8600:tcp:172.30.0.9:8600: input/output error`
-
-**Solution:** To resolve this problem, stop StreamPipes with `streampipes stop` and restart Docker via the Docker settings in the task bar.
-After Docker was restarted, run `streampipes start`.
-
-## Consul does not start
-**Problem:** After starting StreamPipes with `streampipes start`, there is an error with Consul.
-
-**Solution:** To resolve this, execute `streampipes stop`, wait a minute and start it again with `streampipes start`. If you've installed an old version of StreamPipes (before the installer was available), make sure that no network suffixed with `spnet` exists in Docker. Type `docker network ls` to check and `docker network rm NETWORK_NAME` to remove the existing network before running the installer.
-
-## Run StreamPipes in a VM in Windows
-**Problem:** StreamPipes does not work properly with Docker under Windows 8 or earlier versions.
-
-**Solution:** We do support virtual machines (VMs), but if you run them under Windows, there might be problems with Docker and its network configurations.
-Please use Windows 10, OSX or Linux.
-You can also use a VM from a cloud provider to test StreamPipes.
-
-
-## Only a few processors are available in the pipeline editor
-**Problem:** In the Pipeline Editor, only a few processors can be used in pipelines.
-
-**Solution:** In the demo/desktop version, we only integrated a few processors. To ensure that you can easily try out StreamPipes (even on your laptop),
-we tried to make it as lightweight as possible. If you are interested in more sophisticated algorithms, please contact us.
-
-
-## No data is shown in the live dashboard
-**Problem:** The live dashboard does not show any data.
-
-**Solution:** If this is the case, your IP is probably configured wrong.
-You can reinstall the system by running `streampipes clean` and then `streampipes start` again.
-This will delete all StreamPipes configurations. StreamPipes is designed as a server application and requires a fixed IP.
-We created a version to easily run it on your laptop and test it, but on your laptop you usually get a new IP when you change the network.
-This problem only occurs in testing scenarios; in production scenarios, the IP can also be changed manually without data loss.
-
-## Windows 10: Should I use Windows containers or Docker containers?
-**Problem:** StreamPipes does not work with Windows 10.
-
-**Solution:** You should use Docker containers. Go to the Docker settings in your task bar and select 'Switch to Docker containers'.
-
-## Configurations are not deleted
-**Problem:** The configurations are not deleted from the host system. Even after manually removing the 'config/' folder, StreamPipes settings are not deleted.
-Also, the Consul settings are still there.
-
-**Solution:** Probably Docker did not mount a volume in the 'config/' folder. You must delete the anonymous Docker volumes manually. See the Docker [documentation](https://docs.docker.com/engine/reference/commandline/volume_rm/).
-
-
-## Docker Network already used
-**Problem:** When starting StreamPipes, the error message "Creating network 'streampipes-cli_spnet' with driver 'bridge' Pool overlaps with other one on this address space" is shown.
-
-**Solution:** Delete old networks, for example with `docker network prune`.
-
-## Help us to improve StreamPipes and this documentation
-Help us to improve this section.
-If you have any problems with the system or with the documentation, do not hesitate to contact us.
-Our goal is to continuously improve StreamPipes.
-Your help and feedback is welcome.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.coindesk.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.coindesk.md
deleted file mode 100644
index 7d5e9b8e8..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.coindesk.md
+++ /dev/null
@@ -1,45 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.coindesk
-title: Coindesk Bitcoin Price
-sidebar_label: Coindesk Bitcoin Price
-original_id: org.apache.streampipes.connect.adapters.coindesk
----
-
-
-***
-
-## Description
-This adapter continuously provides the current bitcoin price from the Coindesk API.
-
-## Configuration
-
-### Currency
-
-The currency in which the price should be provided.
-
-
-***
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.flic.mqtt.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.flic.mqtt.md
deleted file mode 100644
index 4eb8ec0dd..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.flic.mqtt.md
+++ /dev/null
@@ -1,60 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.flic.mqtt
-title: Flic MQTT
-sidebar_label: Flic MQTT
-original_id: org.apache.streampipes.connect.adapters.flic.mqtt
----
-
-
-***
-
-## Description
-
-Connect Flic Smart Button over MQTT
-
-***
-
-## Required input
-
-This adapter uses the MQTT protocol and requires the data in the following exemplary JSON format:
-`{ "timestamp": 1584973344615, "click_type": "SINGLE", "button_id": "button1" }`.
-***
-
-## Configuration
-
-### Broker URL
-
-Example: tcp://test-server.com:1883 (protocol and port are required)
-
-### Access Mode
-
-The user can choose between unauthenticated and authenticated access.
-
-### Topic
-The topic the button events are published to.
-
-## Output
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.gdelt.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.gdelt.md
deleted file mode 100644
index 0b80e472b..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.gdelt.md
+++ /dev/null
@@ -1,43 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.gdelt
-title: GDELT
-sidebar_label: GDELT
-original_id: org.apache.streampipes.connect.adapters.gdelt
----
-
-
-***
-
-## Description
-This adapter provides news events from the GDELT API.
-
-## Configuration
-
-(no further configuration required)
-
-
-***
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iex.news.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iex.news.md
deleted file mode 100644
index a0f6cae2a..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iex.news.md
+++ /dev/null
@@ -1,49 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.iex.news
-title: IEX Cloud News
-sidebar_label: IEX Cloud News
-original_id: org.apache.streampipes.connect.adapters.iex.news
----
-
-
-***
-
-## Description
-This adapter provides news events from the IEXCloud news API. An API key from IEXCloud is required.
-Visit IEX Cloud for more info.
-
-***
-
-## Configuration
-
-
-### API Token
-A valid API token from the IEXCloud API.
-
-### Stock Symbol
-A stock symbol that should be monitored.
-
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iex.stocks.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iex.stocks.md
deleted file mode 100644
index a3ea09745..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iex.stocks.md
+++ /dev/null
@@ -1,48 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.iex.stocks
-title: IEX Cloud Stock Quotes
-sidebar_label: IEX Cloud Stock Quotes
-original_id: org.apache.streampipes.connect.adapters.iex.stocks
----
-
-
-***
-
-## Description
-This adapter provides stock quote events from the IEXCloud API. An API key from IEXCloud is required.
-Visit IEX Cloud for more info.
-
-***
-
-## Configuration
-
-
-### API Token
-A valid API token from the IEXCloud API.
-
-### Stock Symbol
-A stock symbol that should be monitored.
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.image.set.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.image.set.md
deleted file mode 100644
index f9c910c4d..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.image.set.md
+++ /dev/null
@@ -1,39 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.image.set
-title: Image Upload (Set)
-sidebar_label: Image Upload (Set)
-original_id: org.apache.streampipes.connect.adapters.image.set
----
-
-
-***
-
-## Description
-Upload a zip file of images and create an event per image
-
-
-***
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.image.stream.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.image.stream.md
deleted file mode 100644
index 03d8f81cf..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.image.stream.md
+++ /dev/null
@@ -1,39 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.image.stream
-title: Image Upload (Stream)
-sidebar_label: Image Upload (Stream)
-original_id: org.apache.streampipes.connect.adapters.image.stream
----
-
- -*** - -## Description - -Upload a zip file of images and create an event per image - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.influxdb.set.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.influxdb.set.md deleted file mode 100644 index 794109d5a..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.influxdb.set.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.influxdb.set -title: InfluxDB Set Adapter -sidebar_label: InfluxDB Set Adapter -original_id: org.apache.streampipes.connect.adapters.influxdb.set ---- - - - - - -

- -

- -*** - -## Description -Creates a data set for an InfluxDB measurement. - -*** - -## Configuration - - - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.influxdb.stream.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.influxdb.stream.md deleted file mode 100644 index b10fb21e2..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.influxdb.stream.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.influxdb.stream -title: InfluxDB Stream Adapter -sidebar_label: InfluxDB Stream Adapter -original_id: org.apache.streampipes.connect.adapters.influxdb.stream ---- - - - - - -

- -

- -*** - -## Description -Creates a data stream for an InfluxDB measurement. - -*** - -## Configuration - - - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iss.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iss.md deleted file mode 100644 index bf1fa5c7e..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.iss.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.iss -title: ISS Location -sidebar_label: ISS Location -original_id: org.apache.streampipes.connect.adapters.iss ---- - - - - - -

- -

- -*** - -## Description - -Shows the live position of the International Space Station (ISS), updated every two seconds. - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.mysql.set.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.mysql.set.md deleted file mode 100644 index 4ccecc38e..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.mysql.set.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.mysql.set -title: MySQL Set Adapter -sidebar_label: MySQL Set Adapter -original_id: org.apache.streampipes.connect.adapters.mysql.set ---- - - - - - -

- -

- -*** - -## Description - -Creates a data set from an SQL table - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.mysql.stream.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.mysql.stream.md deleted file mode 100644 index 594d70d11..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.mysql.stream.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.mysql.stream -title: MySql Stream Adapter -sidebar_label: MySql Stream Adapter -original_id: org.apache.streampipes.connect.adapters.mysql.stream ---- - - - - - -

- -

- -*** - -## Description - -Creates a data stream for a SQL table - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.netio.mqtt.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.netio.mqtt.md deleted file mode 100644 index dca15946d..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.netio.mqtt.md +++ /dev/null @@ -1,65 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.netio.mqtt -title: NETIO MQTT M2M -sidebar_label: NETIO MQTT M2M -original_id: org.apache.streampipes.connect.adapters.netio.mqtt ---- - - - - - -

- -

-
-***
-
-## Description
-
-Connects to a NETIO smart power socket and consumes its measurements via the NETIO MQTT M2M API
-
-
-***
-
-## Required input
-
-This adapter does not have any requirements; it connects directly to the device.
-
-***
-
-## Configuration
-
-### Broker Address
-
-Example: test-server.com (No protocol)
-
-### Port
-
-The port of the MQTT broker.
-
-### Topic
-
-The topic the NETIO device publishes its measurements to.
-
-
-## Output
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.netio.rest.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.netio.rest.md
deleted file mode 100644
index 991ad6297..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.netio.rest.md
+++ /dev/null
@@ -1,65 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.netio.rest
-title: NETIO http JSON
-sidebar_label: NETIO http JSON
-original_id: org.apache.streampipes.connect.adapters.netio.rest
----
-

- -

-
-***
-
-## Description
-
-Connects to a NETIO smart power socket and polls its measurements via the device's JSON HTTP API
-
-
-***
-
-## Required input
-
-This adapter does not have any requirements; it polls the device directly.
-
-***
-
-## Configuration
-
-### Device Address
-
-Example: test-server.com (No protocol)
-
-### Port
-
-The port of the device's HTTP interface.
-
-
-## Output
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera.md
deleted file mode 100644
index 98d33411c..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera
-title: NSW Traffic Cameras
-sidebar_label: NSW Traffic Cameras
-original_id: org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera
----
-

- -

- -*** - -## Description - -Traffic camera images produced by NSW Australia - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.opcua.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.opcua.md deleted file mode 100644 index 3fc7169e2..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.opcua.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.opcua -title: OPC UA -sidebar_label: OPC UA -original_id: org.apache.streampipes.connect.adapters.opcua ---- - - - - - -

- -

-
-***
-
-## Description
-
-Reads values from an OPC-UA server repeatedly
-
-***
-
-## Required Input
-
-***
-
-## Configuration
-
-### Polling Interval
-
-Duration of the polling interval in seconds
-
-### Anonymous vs. Username/Password
-
-Choose whether you want to connect anonymously or authenticate using your credentials.
-
-    **Anonymous**: No further information required
-    **Username/Password**: Insert your `username` and `password` to access the OPC UA server
-
-### OPC UA Server
-
-Where can the OPC UA server be found?
-
-    **URL**: Specify the server's full `URL` (including port), can be with or without leading `opc.tcp://`
-    **Host/Port**: Insert the `host` address (with or without leading `opc.tcp://`) and the `port`
-
-### Namespace Index
-
-Requires the index of the namespace you want to connect to.
-
-### Node ID
-
-The identifier of the node you want to read from; numbers and strings are both valid.
-
-### Available Nodes
-
-Shows all available nodes once namespace index and node ID are given.
-Select as many as you like to query.
-
-***
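For readers who want to verify a namespace index and node ID combination outside of StreamPipes, a minimal sketch using the community python-opcua client (an illustrative substitute; the adapter itself is implemented in Java). Server URL and node ID are placeholders:

```python
from opcua import Client  # pip install opcua (FreeOpcUa)

client = Client("opc.tcp://localhost:4840")  # full URL including port
client.connect()
try:
    node = client.get_node("ns=2;i=3")  # namespace index 2, numeric node id 3
    print(node.get_value())             # the value the adapter would poll
finally:
    client.disconnect()
```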

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.plc4x.modbus.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.plc4x.modbus.md
deleted file mode 100644
index ddc8a8ee4..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.plc4x.modbus.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.plc4x.modbus
-title: PLC4X MODBUS
-sidebar_label: PLC4X MODBUS
-original_id: org.apache.streampipes.connect.adapters.plc4x.modbus
----
-
-
-***
-
-## Description
-
-Reads values from a PLC via the Modbus protocol, based on Apache PLC4X.
-
-
-***
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.plc4x.s7.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.plc4x.s7.md
deleted file mode 100644
index 9a94da164..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.plc4x.s7.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.plc4x.s7
-title: PLC4X S7
-sidebar_label: PLC4X S7
-original_id: org.apache.streampipes.connect.adapters.plc4x.s7
----
-

- -

-
-***
-
-## Description
-
-Reads values from a Siemens S7 PLC, based on Apache PLC4X.
-
-
-***
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.ros.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.ros.md
deleted file mode 100644
index 12e7a3db4..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.ros.md
+++ /dev/null
@@ -1,65 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.ros
-title: ROS Bridge
-sidebar_label: ROS Bridge
-original_id: org.apache.streampipes.connect.adapters.ros
----
-

- -

-
-***
-
-## Description
-
-Connects to robots running on ROS
-
-
-***
-
-## Required input
-
-
-
-***
-
-## Configuration
-
-### ROS Bridge
-
-Example: test-server.com (No protocol)
-
-### Port
-
-The port of the ROS instance.
-
-### Topic
-
-Example: /battery (Starts with /)
-
-
-## Output
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.machine.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.machine.md
deleted file mode 100644
index a9c4fae12..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.machine.md
+++ /dev/null
@@ -1,41 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.simulator.machine
-title: Machine Data Simulator
-sidebar_label: Machine Data Simulator
-original_id: org.apache.streampipes.connect.adapters.simulator.machine
----
-

- -

- -*** - -## Description - -Publishes various simulated machine sensor data in a configurable time interval (in milliseconds). -Sensors are: -* flowrate -* pressure -* waterlevel -*** diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.randomdataset.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.randomdataset.md deleted file mode 100644 index 248070299..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.randomdataset.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.simulator.randomdataset -title: Random Data Simulator (Set) -sidebar_label: Random Data Simulator (Set) -original_id: org.apache.streampipes.connect.adapters.simulator.randomdataset ---- - - - - - -

- -

- -*** - -## Description - -Publishes a bounded stream of random events. - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.randomdatastream.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.randomdatastream.md deleted file mode 100644 index 12564088c..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.simulator.randomdatastream.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.simulator.randomdatastream -title: Random Data Simulator (Stream) -sidebar_label: Random Data Simulator (Stream) -original_id: org.apache.streampipes.connect.adapters.simulator.randomdatastream ---- - - - - - -

- -

- -*** - -## Description - -Publishes a continuous stream of random events - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.slack.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.slack.md deleted file mode 100644 index e21951c73..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.slack.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.slack -title: Slack -sidebar_label: Slack -original_id: org.apache.streampipes.connect.adapters.slack ---- - - - - - -

- -

- -*** - -## Description - -Subscribes to a Slack channel - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.ti.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.ti.md deleted file mode 100644 index 11252204f..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.ti.md +++ /dev/null @@ -1,58 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.ti -title: TI Sensor Tag -sidebar_label: TI Sensor Tag -original_id: org.apache.streampipes.connect.adapters.ti ---- - - - - - -

- -

-
-***
-
-## Description
-
-Consumes sensor messages from a TI SensorTag via the MQTT protocol
-
-
-***
-
-## Configuration
-
-### Broker Url
-
-Example: tcp://test-server.com:1883 (Protocol required. Port required)
-
-### Topic
-
-Example: test/topic
-
-### Access Mode
-
-Unauthenticated or Authenticated (Username/Password)
-
-## Output
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.wikipedia.edit.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.wikipedia.edit.md
deleted file mode 100644
index 015004fd4..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.wikipedia.edit.md
+++ /dev/null
@@ -1,40 +0,0 @@
----
-id: org.apache.streampipes.connect.adapters.wikipedia.edit
-title: Wikipedia Edits
-sidebar_label: Wikipedia Edits
-original_id: org.apache.streampipes.connect.adapters.wikipedia.edit
----
-

- -

- -*** - -## Description - -Continuously publishes recent Wikipedia edits - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.wikipedia.new.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.wikipedia.new.md deleted file mode 100644 index f656f7e64..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.adapters.wikipedia.new.md +++ /dev/null @@ -1,40 +0,0 @@ ---- -id: org.apache.streampipes.connect.adapters.wikipedia.new -title: Wikipedia New Articles -sidebar_label: Wikipedia New Articles -original_id: org.apache.streampipes.connect.adapters.wikipedia.new ---- - - - - - -

- -

- -*** - -## Description - -Continuously publishes articles created on Wikipedia - - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.file.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.file.md deleted file mode 100644 index 9db6eedd0..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.file.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -id: org.apache.streampipes.connect.protocol.stream.file -title: File Stream -sidebar_label: File Stream -original_id: org.apache.streampipes.connect.protocol.stream.file ---- - - - - - -

- -

- -*** - -## Description - -Continuously streams the content from a file - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.http.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.http.md deleted file mode 100644 index ae603e97f..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.http.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -id: org.apache.streampipes.connect.protocol.stream.http -title: HTTP Stream -sidebar_label: HTTP Stream -original_id: org.apache.streampipes.connect.protocol.stream.http ---- - - - - - -

- -

-
-***
-
-## Description
-
-Continuously fetches events from an HTTP REST endpoint.
-
-***
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.httpserver.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.httpserver.md
deleted file mode 100644
index 0626268d2..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.httpserver.md
+++ /dev/null
@@ -1,39 +0,0 @@
----
-id: org.apache.streampipes.connect.protocol.stream.httpserver
-title: HTTP Server
-sidebar_label: HTTP Server
-original_id: org.apache.streampipes.connect.protocol.stream.httpserver
----
-

- -

-
-***
-
-## Description
-
-Starts an HTTP server endpoint to which events can be pushed.
-
-***
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.kafka.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.kafka.md
deleted file mode 100644
index 9e8161719..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.kafka.md
+++ /dev/null
@@ -1,39 +0,0 @@
----
-id: org.apache.streampipes.connect.protocol.stream.kafka
-title: Apache Kafka
-sidebar_label: Apache Kafka
-original_id: org.apache.streampipes.connect.protocol.stream.kafka
----
-

- -

- -*** - -## Description - -Consumes messages from an Apache Kafka broker - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.mqtt.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.mqtt.md deleted file mode 100644 index 4a5f1da80..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.mqtt.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -id: org.apache.streampipes.connect.protocol.stream.mqtt -title: MQTT -sidebar_label: MQTT -original_id: org.apache.streampipes.connect.protocol.stream.mqtt ---- - - - - - -

- -

-
-***
-
-## Description
-
-Consumes messages from a broker using the MQTT protocol
-
-
-***
-
-## Configuration
-
-### Broker Url
-
-Example: tcp://test-server.com:1883 (Protocol required. Port required)
-
-### Access Mode
-
-Unauthenticated or Authenticated (Username/Password)
-
-## Output
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.pulsar.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.pulsar.md
deleted file mode 100644
index 0796346fe..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.connect.protocol.stream.pulsar.md
+++ /dev/null
@@ -1,39 +0,0 @@
----
-id: org.apache.streampipes.connect.protocol.stream.pulsar
-title: Apache Pulsar
-sidebar_label: Apache Pulsar
-original_id: org.apache.streampipes.connect.protocol.stream.pulsar
----
-

- -

- -*** - -## Description - -Consumes messages from an Apache Pulsar broker - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.flink.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.flink.md deleted file mode 100644 index 93e1e970b..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.flink.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -id: org.apache.streampipes.processor.geo.flink -title: Spatial Grid Enrichment -sidebar_label: Spatial Grid Enrichment -original_id: org.apache.streampipes.processor.geo.flink ---- - - - - - -

- -

-
-***
-
-## Description
-
-Groups spatial events into cells of a given size.
-The result is a chessboard-like grid into which the geo coordinates are sorted. The user can define the coordinates of the first cell.
-
-***
-
-## Required input
-Requires a latitude and longitude in the data stream.
-
-## Configuration
-
-* Latitude property
-* Longitude property
-* The size of the cell
-* Latitude and longitude of the first cell
-
-## Output
-Appends the grid cell coordinates to the input event
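A minimal sketch of the cell assignment described above, assuming equally sized cells measured in degrees and a user-defined first cell corner (function and parameter names are illustrative, not the processor's actual field names):

```python
def grid_cell(lat: float, lon: float, lat0: float, lon0: float, cell_size: float):
    """Return the (row, column) of the grid cell containing (lat, lon),
    counted from the first cell's corner (lat0, lon0)."""
    row = int((lat - lat0) // cell_size)
    col = int((lon - lon0) // cell_size)
    return row, col

# Example: 0.1-degree cells anchored at (48.0, 8.0)
print(grid_cell(48.56, 8.25, 48.0, 8.0, 0.1))  # (5, 2)
```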

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.geocoding.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.geocoding.md
deleted file mode 100644
index 3d42f6eeb..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.geocoding.md
+++ /dev/null
@@ -1,61 +0,0 @@
----
-id: org.apache.streampipes.processor.geo.jvm.geocoding
-title: Google Maps Geocoder
-sidebar_label: Google Maps Geocoder
-original_id: org.apache.streampipes.processor.geo.jvm.geocoding
----
-
-
-***
-
-## Description
-
-This processor computes the latitude and longitude values from a location (a place name such as "Karlsruhe, Germany") and adds the result to the event.
-
-***
-
-## Required input
-
-The input event requires a field which contains the name of a place.
-
-***
-
-## Configuration
-
-### Place
-
-The field of the input event that should be used to compute the lat/lng values.
-
-## Output
-
-Outputs an event similar to the one below.
-
-```
-{
-  'latitude': 6.927079,
-  'longitude': 79.861244
-}
-```
\ No newline at end of file
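To reproduce such a lookup outside of StreamPipes, here is a sketch using the free Nominatim service via geopy, a deliberate substitution for the Google Maps API the processor uses, chosen to avoid needing an API key:

```python
from geopy.geocoders import Nominatim  # pip install geopy

geolocator = Nominatim(user_agent="streampipes-docs-example")
location = geolocator.geocode("Colombo, Sri Lanka")
print({"latitude": location.latitude, "longitude": location.longitude})
# roughly {'latitude': 6.93, 'longitude': 79.85}
```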

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.reversegeocoding.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.reversegeocoding.md
deleted file mode 100644
index 3f3319fb4..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.reversegeocoding.md
+++ /dev/null
@@ -1,65 +0,0 @@
----
-id: org.apache.streampipes.processor.geo.jvm.reversegeocoding
-title: Reverse Geocoder
-sidebar_label: Reverse Geocoder
-original_id: org.apache.streampipes.processor.geo.jvm.reversegeocoding
----
-
-
-***
-
-## Description
-
-This processor computes the place name for given lat/lng coordinates that are transmitted as fields of an event.
-
-This processor automatically downloads the file cities1000.zip from Geonames (http://download.geonames.org/export/dump/cities1000.zip). This file is provided under the CC BY 4.0 license, see https://creativecommons.org/licenses/by/4.0/ for license details.
-
-***
-
-## Required input
-
-The input event requires latitude and longitude values.
-
-***
-
-## Configuration
-
-### Latitude
-
-The field containing the latitude value.
-
-### Longitude
-
-The field containing the longitude value.
-
-## Output
-
-Outputs an event similar to the one below.
-
-```
-{
-  'place': 'Colombo'
-}
-```
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.staticgeocoding.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.staticgeocoding.md
deleted file mode 100644
index c63805bae..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.geo.jvm.staticgeocoding.md
+++ /dev/null
@@ -1,62 +0,0 @@
----
-id: org.apache.streampipes.processor.geo.jvm.staticgeocoding
-title: Static Google Maps Geocoder
-sidebar_label: Static Google Maps Geocoder
-original_id: org.apache.streampipes.processor.geo.jvm.staticgeocoding
----
-

-

-
-***
-
-## Description
-
-This processor computes the latitude and longitude values from a fixed location (a place name such as "Karlsruhe, Germany") and adds the result to the event.
-
-***
-
-## Required input
-
-The input event requires a field which contains the name of a place.
-
-***
-
-## Configuration
-
-### Place
-
-The place name that should be converted to a lat/lng combination
-
-## Output
-
-Outputs an event similar to the one below.
-
-```
-{
-  'latitude': 6.927079,
-  'longitude': 79.861244
-}
-```
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification.md
deleted file mode 100644
index 7a6aa8cc3..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification.md
+++ /dev/null
@@ -1,53 +0,0 @@
----
-id: org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification
-title: Generic Image Classification
-sidebar_label: Generic Image Classification
-original_id: org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification
----
-

- -

-
-***
-
-## Description
-
-Classifies images using a generic pre-trained model and appends the classification result to the event.
-
-***
-
-## Required input
-
-Input events require an image field.
-
-***
-
-## Configuration
-
-### Image field
-
-Field that contains the image.
-
-## Output
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper.md
deleted file mode 100644
index c4f96577b..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper.md
+++ /dev/null
@@ -1,44 +0,0 @@
----
-id: org.apache.streampipes.processor.imageclassification.jvm.image-cropper
-title: Image Cropper
-sidebar_label: Image Cropper
-original_id: org.apache.streampipes.processor.imageclassification.jvm.image-cropper
----
-

- -

-
-***
-
-## Description
-
-Image Enrichment: Crops an image based on given bounding box coordinates
-
-***
-
-## Required input
-An image and an array with bounding boxes.
-A box consists of the x and y coordinates in the image as well as the height and width
-
-## Output
-A new event for each box containing the cropped image
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher.md
deleted file mode 100644
index e008be9ec..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher.md
+++ /dev/null
@@ -1,44 +0,0 @@
----
-id: org.apache.streampipes.processor.imageclassification.jvm.image-enricher
-title: Image Enricher
-sidebar_label: Image Enricher
-original_id: org.apache.streampipes.processor.imageclassification.jvm.image-enricher
----
-

- -

-
-***
-
-## Description
-
-Image Enrichment: Enriches an image with given bounding box coordinates
-
-## Required input
-An image and an array with bounding boxes, an array with scores and an array with labels.
-A box consists of the x and y coordinates in the image as well as the height and width, and the class index with score
-
-
-## Output
-A new event containing the image with bounding boxes rendered according to the boxes of the input event
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.qrcode.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.qrcode.md
deleted file mode 100644
index 4bfd514a9..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processor.imageclassification.qrcode.md
+++ /dev/null
@@ -1,69 +0,0 @@
----
-id: org.apache.streampipes.processor.imageclassification.qrcode
-title: QR Code Reader
-sidebar_label: QR Code Reader
-original_id: org.apache.streampipes.processor.imageclassification.qrcode
----
-

- -

-
-***
-
-## Description
-
-QR Code Reader: Detects a QR Code in an image
-
-***
-
-## Required input
-
-Input events require an image field.
-
-***
-
-## Configuration
-
-### Image
-
-Image of the QR code
-
-### Send placeholder value if no QR code is detected
-
-A boolean option; if enabled, a placeholder value is sent whenever no QR code is detected.
-
-### Placeholder value
-
-The placeholder value to send in that case.
-
-## Output
-
-Outputs an event similar to the one below.
-
-```
-{
-  'qrvalue': 'http://github.com/',
-  'timestamp': 1621244783151
-}
-```
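A sketch of the same detection step using OpenCV's built-in QR detector (an illustrative substitute; the processor's internal implementation is not documented here). The file name is a placeholder:

```python
import time
import cv2  # pip install opencv-python

img = cv2.imread("frame.png")  # the incoming image field, decoded to an array
value, points, _ = cv2.QRCodeDetector().detectAndDecode(img)

placeholder = "n/a"  # sent if the placeholder option is enabled and nothing is found
event = {"qrvalue": value if value else placeholder,
         "timestamp": int(time.time() * 1000)}
print(event)
```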

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.aggregation.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.aggregation.md
deleted file mode 100644
index 5e9e9e808..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.aggregation.md
+++ /dev/null
@@ -1,62 +0,0 @@
----
-id: org.apache.streampipes.processors.aggregation.flink.aggregation
-title: Aggregation
-sidebar_label: Aggregation
-original_id: org.apache.streampipes.processors.aggregation.flink.aggregation
----
-
-
-***
-
-## Description
-
-Performs different aggregation functions based on a sliding time window (e.g., average, sum, min, max)
-
-***
-
-## Required input
-
-The aggregation processor requires a data stream that has at least one field containing a numerical value.
-
-***
-
-## Configuration
-
-### Group by
-The aggregation function can be calculated separately (partitioned) by the selected field value.
-
-### Output every
-The frequency (in seconds) at which aggregated values are emitted.
-
-### Time window
-The size of the time window in seconds.
-
-### Aggregated Value
-The field used for calculating the aggregation value.
-
-## Output
-
-This processor appends the latest aggregated value to every input event that arrives.
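A minimal sketch of a sliding time-window average as described above (plain Python, ignoring the "group by" partitioning and the separate output frequency for brevity):

```python
from collections import deque

class SlidingAverage:
    def __init__(self, window_seconds: float):
        self.window_ms = window_seconds * 1000
        self.buffer = deque()  # (timestamp_ms, value) pairs

    def add(self, timestamp_ms: int, value: float) -> float:
        self.buffer.append((timestamp_ms, value))
        # evict values that fell out of the time window
        while self.buffer[0][0] < timestamp_ms - self.window_ms:
            self.buffer.popleft()
        return sum(v for _, v in self.buffer) / len(self.buffer)
```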

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.count.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.count.md
deleted file mode 100644
index c9516f62b..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.count.md
+++ /dev/null
@@ -1,67 +0,0 @@
----
-id: org.apache.streampipes.processors.aggregation.flink.count
-title: Count Aggregation
-sidebar_label: Count Aggregation
-original_id: org.apache.streampipes.processors.aggregation.flink.count
----
-
-
-***
-
-## Description
-
-Performs an aggregation based on a given field and outputs the number of occurrences.
-Example: Count the number of vehicle positions per vehicleId.
-The Count Aggregation requires a time window, over which the count is performed, and a field whose values are counted.
-
-***
-
-## Required input
-There is no specific input required.
-
-***
-
-## Configuration
-### FieldToCount
-Specifies the field containing the values that should be counted.
-
-### TimeWindowSize
-Specifies the size of the time window and consequently the number of values that are aggregated each time.
-
-### Time Window Scale
-Specifies the scale/unit of the time window. There are three different time scales to choose from: seconds, minutes or hours.
-
-## Output
-The output event is composed of two fields. The field "value" specifies the value to count.
-The second field "count" returns the number of occurrences.
-Example:
-```
-{
-  'value': 'vehicleId',
-  'count': 12
-}
-```
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.eventcount.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.eventcount.md
deleted file mode 100644
index d744a88d2..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.eventcount.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-id: org.apache.streampipes.processors.aggregation.flink.eventcount
-title: Event Counter
-sidebar_label: Event Counter
-original_id: org.apache.streampipes.processors.aggregation.flink.eventcount
----
-
-
-
-***
-
-## Description
-Counts the number of events arriving within a time window. An event is emitted every time the time window expires.
-
-***
-
-## Required input
-There is no specific input required.
-
-***
-
-## Configuration
-Time Window: The scale and size of the time window.
-
-### TimeWindowSize
-Specifies the size of the time window.
-
-### Time Window Scale
-Specifies the scale/unit of the time window. There are three different time scales to choose from: seconds, minutes or hours.
-
-## Output
-```
-{
-  'timestamp': 1601301980014,
-  'count': 12
-}
-```
\ No newline at end of file
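The counting step of both processors reduces to grouping the events of one time window, for example (a sketch; the field name is taken from the vehicleId example above):

```python
from collections import Counter

window_events = [{"vehicleId": "v1"}, {"vehicleId": "v2"}, {"vehicleId": "v1"}]
counts = Counter(e["vehicleId"] for e in window_events)

# one output event per distinct value, as described above
events = [{"value": value, "count": n} for value, n in counts.items()]
print(events)  # [{'value': 'v1', 'count': 2}, {'value': 'v2', 'count': 1}]
```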

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.rate.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.rate.md
deleted file mode 100644
index 49a9f8b1e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.aggregation.flink.rate.md
+++ /dev/null
@@ -1,55 +0,0 @@
----
-id: org.apache.streampipes.processors.aggregation.flink.rate
-title: Event Rate
-sidebar_label: Event Rate
-original_id: org.apache.streampipes.processors.aggregation.flink.rate
----
-
-
-***
-
-## Description
-Computes the current event rate. Output is a number representing events per second.
-
-***
-
-## Required input
-The event rate processor works with any stream and does not have any specific requirements.
-
-***
-
-## Configuration
-
-
-### Time Baseline
-Specifies the size of the window used for calculating the rate in seconds. This parameter also determines the output rate.
-
-## Output
-```
-{
-  'rate': 0.8
-}
-```
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.changedetection.jvm.cusum.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.changedetection.jvm.cusum.md
deleted file mode 100644
index c83f6e5b3..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.changedetection.jvm.cusum.md
+++ /dev/null
@@ -1,66 +0,0 @@
----
-id: org.apache.streampipes.processors.changedetection.jvm.cusum
-title: Cusum
-sidebar_label: Cusum
-original_id: org.apache.streampipes.processors.changedetection.jvm.cusum
----
-
-
-
-***
-
-## Description
-
-Performs change detection on a single dimension of the incoming data stream. A change is detected if the cumulative deviation from the mean exceeds a certain threshold. This implementation tracks the mean and the standard deviation using Welford's algorithm, which is well suited for data streams.
-
-***
-
-## Required input
-
-The cusum processor requires a data stream that has at least one field containing a numerical value.
-
-***
-
-## Configuration
-
-### Value to observe
-Specify the dimension of the data stream (e.g. the temperature) on which to perform change detection.
-
-### Parameter `k`
-`k` controls the sensitivity of the change detector. Its unit is standard deviations. For an observation `x_n`, the high-side Cusum value is `S_n = max(0, S_{n-1} + z-score(x_n) - k)`; the low-side value uses `-z-score(x_n)` instead. Thus, the cusum-score `S` increases if `z-score(x_n) > k`.

-### Parameter `h`
-The alarm threshold in standard deviations. An alarm occurs if `S_n > h`
-
-## Output
-
-This processor outputs the original data stream plus
-
-- `cusumLow`: The cusum value for negative changes
-- `cusumHigh`: The cusum value for positive changes
-- `changeDetectedLow`: Boolean indicating if a negative change was detected
-- `changeDetectedHigh`: Boolean indicating if a positive change was detected
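A compact sketch of the scheme described above: Welford's online mean/variance combined with a two-sided CUSUM. This is illustrative only; the parameter defaults are assumptions, not the processor's defaults:

```python
import math

class Cusum:
    def __init__(self, k: float = 0.5, h: float = 5.0):
        self.k, self.h = k, h
        self.n, self.mean, self.m2 = 0, 0.0, 0.0   # Welford state
        self.s_high = self.s_low = 0.0             # CUSUM scores

    def update(self, x: float) -> dict:
        # Welford's algorithm: running mean and variance
        self.n += 1
        delta = x - self.mean
        self.mean += delta / self.n
        self.m2 += delta * (x - self.mean)
        std = math.sqrt(self.m2 / self.n) if self.n > 1 and self.m2 > 0 else 1.0
        z = (x - self.mean) / std
        self.s_high = max(0.0, self.s_high + z - self.k)  # positive changes
        self.s_low = max(0.0, self.s_low - z - self.k)    # negative changes
        return {"cusumHigh": self.s_high, "cusumLow": self.s_low,
                "changeDetectedHigh": self.s_high > self.h,
                "changeDetectedLow": self.s_low > self.h}
```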

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.math.mathop.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.math.mathop.md
deleted file mode 100644
index e619125c7..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.math.mathop.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-id: org.apache.streampipes.processors.enricher.flink.processor.math.mathop
-title: Math
-sidebar_label: Math
-original_id: org.apache.streampipes.processors.enricher.flink.processor.math.mathop
----
-
- -*** - -## Description - -Performs calculations on event properties (+, -, *, /, %). - -*** - -## Required input -The math processor works with any event that has at least one field containing a numerical value. - -*** - -## Configuration - -### Left operand -The field from the input event that should be used as the left operand. - -### Right operand -The field from the input event that should be used as the right operand. - -### Operation -The math operation that should be performed. - -## Output -The processor appends the calculation result to each input event. diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop.md deleted file mode 100644 index 0f044c568..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -id: org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop -title: Static Math -sidebar_label: Static Math -original_id: org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop ---- - - - - - -

- -

- -*** - -## Description - -Performs calculation on an event property with a static value (+, -, *, /, %). - -*** - -## Required input -The math processor works with any event that has at least one field containing a numerical value. - -*** - -## Configuration - -### Left operand -The field from the input event that should be used as the left operand. - -### Right operand value -Specify the value of the right operand. - -### Operation -The math operation that should be performed. - -## Output -The processor appends the calculation result to each input event. diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.trigonometry.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.trigonometry.md deleted file mode 100644 index 2fac83e2e..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.trigonometry.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -id: org.apache.streampipes.processors.enricher.flink.processor.trigonometry -title: Trigonometry Functions -sidebar_label: Trigonometry Functions -original_id: org.apache.streampipes.processors.enricher.flink.processor.trigonometry ---- - - - - - -

- -

-
-***
-
-## Description
-
-Performs trigonometric functions (sin, cos, tan) on event properties.
-
-***
-
-## Required input
-The trigonometry processor works with any event that has at least one field containing a numerical value.
-
-***
-
-## Configuration
-
-### Alpha
-The field that should be used for calculating the trigonometric function.
-
-
-### Operation
-The trigonometric function that should be calculated.
-
-## Output
-The processor appends the calculation result to each input event.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.urldereferencing.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.urldereferencing.md
deleted file mode 100644
index 01fed70bc..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.processor.urldereferencing.md
+++ /dev/null
@@ -1,52 +0,0 @@
----
-id: org.apache.streampipes.processors.enricher.flink.processor.urldereferencing
-title: URL Dereferencing
-sidebar_label: URL Dereferencing
-original_id: org.apache.streampipes.processors.enricher.flink.processor.urldereferencing
----
-

- -

-
-***
-
-## Description
-
-Fetches the HTML page behind a URL and appends it as a string to the event.
-
-***
-
-## Required input
-The URL Dereferencing processor requires an input stream that provides an input field of type 'string', representing
-the URL to dereference.
-
-***
-
-## Configuration
-
-### URL
-The field containing the URL to dereference.
-
-## Output
-The processor appends the extracted HTML page to each input event.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.timestamp.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.timestamp.md
deleted file mode 100644
index 6dd6bfcec..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.flink.timestamp.md
+++ /dev/null
@@ -1,49 +0,0 @@
----
-id: org.apache.streampipes.processors.enricher.flink.timestamp
-title: Timestamp Enricher
-sidebar_label: Timestamp Enricher
-original_id: org.apache.streampipes.processors.enricher.flink.timestamp
----
-

- -

- -*** - -## Description -Appends the current time in ms to the event payload. - -*** - -## Required input -The timestamp enricher works with any input event. - -*** - -## Configuration - -(no further configuration required) - -## Output -This processor appends the current system time to every input event. diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.jvm.sizemeasure.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.jvm.sizemeasure.md deleted file mode 100644 index 520018f6c..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.enricher.jvm.sizemeasure.md +++ /dev/null @@ -1,50 +0,0 @@ ---- -id: org.apache.streampipes.processors.enricher.jvm.sizemeasure -title: Size Measure -sidebar_label: Size Measure -original_id: org.apache.streampipes.processors.enricher.jvm.sizemeasure ---- - - - - - -

- -

-
-***
-
-## Description
-
-Measures the size of an incoming event by serializing it, and appends the result to the event.
-
-***
-
-## Required input
-The size measure processor does not have any specific input requirements.
-
-***
-
-## Configuration
-
-You can specify if the size should be in Bytes, Kilobytes (1024 Bytes) or in Megabytes (1024 Kilobytes).
-
-## Output
-The size measure processor appends the size of the event (excluding the appended size field itself) as a double. The rest of the event stays the same.
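The measurement itself can be pictured as serializing the event and counting bytes, e.g. (a sketch assuming JSON serialization; the processor's actual serialization format is not specified here):

```python
import json

def event_size(event: dict, unit: str = "BYTE") -> float:
    size = len(json.dumps(event).encode("utf-8"))
    divisor = {"BYTE": 1, "KILOBYTE": 1024, "MEGABYTE": 1024 * 1024}[unit]
    return size / divisor

event = {"timestamp": 1601301980014, "temperature": 22.5}
print(event_size(event, "KILOBYTE"))
```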

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.compose.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.compose.md
deleted file mode 100644
index c02b529f8..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.compose.md
+++ /dev/null
@@ -1,51 +0,0 @@
----
-id: org.apache.streampipes.processors.filters.jvm.compose
-title: Compose
-sidebar_label: Compose
-original_id: org.apache.streampipes.processors.filters.jvm.compose
----
-
-
-***
-
-## Description
-
-Merges two event streams. Any time a new input event arrives, it is merged with the last input event from the other
-event stream and forwarded.
-
-***
-
-## Required input
-The Compose processor does not have any specific input requirements.
-
-***
-
-## Configuration
-
-(no further configuration required)
-
-## Output
-The compose processor has a configurable output that can be selected by the user at pipeline modeling time.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.enrich.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.enrich.md
deleted file mode 100644
index a119dc1cb..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.enrich.md
+++ /dev/null
@@ -1,48 +0,0 @@
----
-id: org.apache.streampipes.processors.filters.jvm.enrich
-title: Merge By Enrich
-sidebar_label: Merge By Enrich
-original_id: org.apache.streampipes.processors.filters.jvm.enrich
----
-

- -

-
-***
-
-## Description
-Merges two data streams by enriching one of the streams with the properties of the other stream. The output frequency is the same as the frequency of the stream which is enriched.
-***
-
-## Required input
-None
-***
-
-## Configuration
-
-* Select the stream which should be enriched with the properties of the other stream.
-  * The last event of the stream is held in state, and each event of the other stream is enriched by the properties the user selected
-
-## Output
-The processor has a configurable output that can be selected by the user at pipeline modeling time.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.limit.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.limit.md
deleted file mode 100644
index 5fc76945e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.limit.md
+++ /dev/null
@@ -1,71 +0,0 @@
----
-id: org.apache.streampipes.processors.filters.jvm.limit
-title: Rate Limit
-sidebar_label: Rate Limit
-original_id: org.apache.streampipes.processors.filters.jvm.limit
----
-

- -

-
-***
-
-## Description
-This processor limits the number of events emitted, based on a specified criterion such as time or number of events.
-
-***
-
-## Required input
-The processor works with any input event.
-
-***
-
-## Configuration
-
-### Enable Grouping
-Enabling this will use grouping with rate-limiting (note: disabling this will ignore the `Grouping Field` property).
-
-### Grouping Field
-Runtime field to be used as the grouping key. If grouping is disabled, this setting will be ignored.
-
-### Window Type
-This specifies the type of window to be used (time / length / cron).
-
-### Length Window Size
-Length window size in event count (note: only works with length window type).
-
-### Time Window Size
-Time window size in milliseconds (note: only works with time window type).
-
-### Cron Window Expression
-Cron expression [Link](https://www.freeformatter.com/cron-expression-generator-quartz.html) to trigger and emit events (e.g., `0 * * ? * *` for every minute) (note: only works with cron window type).
-
-### Output Event Selection
-This specifies the event(s) that are selected to be emitted.
-- First: emit first event of the window.
-- Last: emit last event of the window.
-- All: emit all events of the window.
-
-## Output
-The processor outputs events which satisfy the rate-limiting conditions.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.merge.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.merge.md
deleted file mode 100644
index 79516d36a..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.merge.md
+++ /dev/null
@@ -1,58 +0,0 @@
----
-id: org.apache.streampipes.processors.filters.jvm.merge
-title: Synchronize Two Streams
-sidebar_label: Synchronize Two Streams
-original_id: org.apache.streampipes.processors.filters.jvm.merge
----
-

- -

-
-***
-
-## Description
-
-Merges two event streams by their timestamps.
-Two events from the different streams are merged when they occur at the same time, within the configured interval.
-
-The following figure shows how the events of the two data streams will be merged:
-

-(Figure omitted: events of the two streams aligned by timestamp and merged pairwise.)
-
-
-***
-
-## Required input
-Each of the data streams needs a timestamp.
-
-***
-
-## Configuration
-
-* For each stream, the timestamp property on which the merge is performed has to be selected
-* The Time Interval describes the maximum difference between two timestamps to decide whether they are a match. To be a valid match, the following condition must hold: | timestamp_stream_1 - timestamp_stream_2 | < interval
-
-## Output
-The processor has a configurable output that can be selected by the user at pipeline modeling time.
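The matching condition reduces to a simple timestamp comparison; a sketch (field and parameter names are illustrative):

```python
def is_match(ts_stream_1: int, ts_stream_2: int, interval_ms: int) -> bool:
    # two events are merged if their timestamps are closer than the interval
    return abs(ts_stream_1 - ts_stream_2) < interval_ms

print(is_match(1601301980014, 1601301980100, 200))  # True
```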

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.numericalfilter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.numericalfilter.md
deleted file mode 100644
index 82b4c5b7c..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.numericalfilter.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-id: org.apache.streampipes.processors.filters.jvm.numericalfilter
-title: Numerical Filter
-sidebar_label: Numerical Filter
-original_id: org.apache.streampipes.processors.filters.jvm.numericalfilter
----
-
-
-***
-
-## Description
-The Numerical Filter processor filters numerical values based on a given threshold.
-
-***
-
-## Required input
-The processor works with any input event that has one field containing a numerical value.
-
-***
-
-## Configuration
-
-### Field
-Specifies the field name where the filter operation should be applied on.
-
-
-### Operation
-Specifies the filter operation that should be applied on the field.
-
-### Threshold value
-Specifies the threshold value.
-
-## Output
-The processor outputs the input event if it satisfies the filter expression.
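The filter semantics map directly onto standard comparison operators, e.g. (a sketch; the operation labels are illustrative, not the processor's exact option names):

```python
import operator

OPERATIONS = {"<": operator.lt, "<=": operator.le, ">": operator.gt,
              ">=": operator.ge, "==": operator.eq}

def passes(event: dict, field: str, op: str, threshold: float) -> bool:
    return OPERATIONS[op](event[field], threshold)

print(passes({"temperature": 25.3}, "temperature", ">", 20))  # True
```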

diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter.md
deleted file mode 100644
index f002872ff..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter.md
+++ /dev/null
@@ -1,68 +0,0 @@
----
-id: org.apache.streampipes.processors.filters.jvm.numericaltextfilter
-title: Numerical Text Filter
-sidebar_label: Numerical Text Filter
-original_id: org.apache.streampipes.processors.filters.jvm.numericaltextfilter
----
-
- - -*** - -## Description -The Numerical Text Filter processor filters numerical values based on a given threshold and text values -based on a given string. It only forwards events in case both criteria are satisfied. - -*** - -## Required input -The processor works with any input event that has one field containing a numerical value and one field -containing a text. - -*** - -## Configuration - -### Number Field -Specifies the field name where the filter operation should be applied on. - -### Number Operation -Specifies the filter operation that should be applied on the field. - -### Number Threshold -Specifies the threshold value. - -### Text Field -The field containing the text that should be filtered. - -### Text Operation -The operation used by the filter processor (equals or matches). - -### Text Keyword -Specifies the keyword to filter the text field. - -## Output -The processor outputs the input event if it satisfies the filter expression. diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.project.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.project.md deleted file mode 100644 index ca2c0567c..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.project.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -id: org.apache.streampipes.processors.filters.jvm.project -title: Projection -sidebar_label: Projection -original_id: org.apache.streampipes.processors.filters.jvm.project ---- - - - - - -

- -

- -*** - -## Description -Outputs a selectable subset of an input event type. - -*** - -## Required input -The project processor works with any input event stream. - -*** - -## Configuration - -(no further configuration required) - -## Output -The output depends on the fields selected at pipeline development time. diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.textfilter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.textfilter.md deleted file mode 100644 index c6e5fa9ce..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.textfilter.md +++ /dev/null @@ -1,54 +0,0 @@ ---- -id: org.apache.streampipes.processors.filters.jvm.textfilter -title: Text Filter -sidebar_label: Text Filter -original_id: org.apache.streampipes.processors.filters.jvm.textfilter ---- - - - - - -

- -

- -*** - -## Description -The Text Filter processor filters text values based on a given string. - -*** - -## Required input -The processor works with any input event that has one field containing a text. - -*** - -## Configuration - -### Text Field -The field containing the text that should be filtered. - - -### Operation -The operation used by the filter processor (equals or matches) - -## Output -The processor outputs the input event if it satisfies the filter expression. diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.threshold.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.threshold.md deleted file mode 100644 index a9055c851..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.filters.jvm.threshold.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -id: org.apache.streampipes.processors.filters.jvm.threshold -title: Threshold Detector -sidebar_label: Threshold Detector -original_id: org.apache.streampipes.processors.filters.jvm.threshold ---- - - - - - -

- -

-
-***
-
-## Description
-The Threshold Detector processor appends a boolean indicating whether the condition is fulfilled or not
-
-***
-
-## Required input
-The processor works with any input event that has one field containing a numerical value.
-
-***
-
-## Configuration
-
-### Field
-Specifies the field name where the filter operation should be applied on.
-
-
-### Operation
-Specifies the filter operation that should be applied on the field.
-
-### Threshold value
-Specifies the threshold value.
-
-## Output
-Appends a boolean indicating whether the condition is fulfilled or not.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo.md
deleted file mode 100644
index 7317682b1..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo.md
+++ /dev/null
@@ -1,74 +0,0 @@
----
-id: org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo
-title: Creates JTS Point
-sidebar_label: Creates JTS Point
-original_id: org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo
----
-

- -

-
-***
-
-## Description
-
-This processor creates a JTS Point geometry from latitude and longitude values.
-
-***
-
-## Required input
-
-* Ontology Vocabulary Latitude
-* Ontology Vocabulary Longitude
-* Integer value representing the EPSG code
-
-
-***
-
-## Configuration
-
-Creates a JTS Geometry Point from Longitude (x) and Latitude (y) values in the coordinate reference system represented by the EPSG code.
-An empty point geometry is created if the latitude or longitude value is missing in the event (e.g. null value) or values are out of range. Allowed values for longitude are between -180.00 and 180.00; latitude values between -90.00 and 90.00.
-
-### 1st parameter
-Latitude value
-
-### 2nd parameter
-Longitude value
-
-### 3rd parameter
-EPSG code value
-
-***
-
-## Output
-
-Adds a point geometry in the Well Known Text notation and in Longitude (x) Latitude (y) axis order to the stream.
-
-### Example
-* Input stream:
-  `{latitude=48.5622, longitude=-76.3501, EPSG=4326}`
-
-* Output stream:
-  `{latitude=48.5622, longitude=-76.3501, EPSG=4326, geom_wkt=POINT (-76.3501 48.5622)}`
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG.md
deleted file mode 100644
index 5a61d273e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG.md
+++ /dev/null
@@ -1,60 +0,0 @@
----
-id: org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG
-title: EPSG Code
-sidebar_label: EPSG Code
-original_id: org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG
----
-
-
-
-
-
-
-
-***
-
-## Description
-
-This processor adds an integer value to the stream. This integer value represents
-an [EPSG Code](https://en.wikipedia.org/wiki/EPSG_Geodetic_Parameter_Dataset) as a Spatial Reference System Identifier [(SRID)](https://en.wikipedia.org/wiki/Spatial_reference_system#Identifier).
-
-
-***
-
-## Required input
-
-None
-
-***
-
-## Configuration
-
-An integer value representing a spatial reference system [(SRS)](https://en.wikipedia.org/wiki/Spatial_reference_system#Identifier). Other possible values can be looked up under https://spatialreference.org/ref/epsg/.
-
-### Parameter
-
-A 4- to 5-digit integer key. The default value is 4326, representing the World Geodetic System [(WGS84)](https://en.wikipedia.org/wiki/World_Geodetic_System#WGS84).
-
-***
-## Output
-
-Adds a number to the event.
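-
-**Example (illustrative; the appended field name is hypothetical):**
-
-Input: `(latitude: 48.5622, longitude: -76.3501)`
-
-Output: `(latitude: 48.5622, longitude: -76.3501, epsg: 4326)`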
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory.md
deleted file mode 100644
index 8f025649c..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory.md
+++ /dev/null
@@ -1,84 +0,0 @@
----
-id: org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory
-title: Single Trajectory Creator
-sidebar_label: Single Trajectory Creator
-original_id: org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory
----
-
-
-
-
-
-
-***
-
-## Description
-
-This processor creates a JTS LineString geometry from JTS Point events, representing a trajectory. A trajectory is defined as the path that a moving object follows through space as a function of time. Each sub-point of this LineString represents a single event. The latest sub-point represents the latest geo-event. For each Point event it is also possible to store an additional m-value representing, for example, the current speed, distance, duration or direction of this event. A trajectory consists of at least two sub-points and can't grow infinitely, so a threshold for the maximum number of allowed sub-points is required. When the sub-point threshold is exceeded, the oldest point is removed from the LineString.
-***
-
-## Required input
-
-* WKT string of a JTS Point geometry
-* Integer value representing the EPSG code
-* Number value for the M-value
-
-
-***
-
-## Configuration
-
-Creates a JTS Geometry LineString from JTS Point geometry events representing a trajectory.
-
-
-### 1st parameter
-Point WKT string
-
-### 2nd parameter
-EPSG code value
-
-### 3rd parameter
-M-value for each sub-point of the trajectory
-
-### 4th parameter
-String for a description text for the trajectory
-
-### 5th parameter
-Number of allowed sub-points
-
-***
-
-## Output
-
-Adds a LineString geometry in Well Known Text notation to the event, representing a trajectory. The description text is also added to the event stream. The first existing event creates an empty LineString.
-
-### Example
-Creating a LineString with a threshold of 2 allowed sub-points:
-
-* First event:
-  * Point(8.12 41.23) --> LineString (empty)
-* Second event:
-  * Point(8.56 41.25) --> LineString(8.12 41.23, 8.56 41.25)
-* Third event:
-  * Point(8.84 40.98) --> LineString(8.56 41.25, 8.84 40.98)
-
-The M-value is not represented in the LineString but is stored for internal use!
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.distancecalculator.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.distancecalculator.md
deleted file mode 100644
index 8a86104e7..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.distancecalculator.md
+++ /dev/null
@@ -1,61 +0,0 @@
----
-id: org.apache.streampipes.processors.geo.jvm.processor.distancecalculator
-title: Distance Calculator
-sidebar_label: Distance Calculator
-original_id: org.apache.streampipes.processors.geo.jvm.processor.distancecalculator
----
-
-
-
-
-
-
-***
-
-## Description
-Calculates the distance between two latitude/longitude pairs in a single event.
-
-***
-
-## Required input
-Requires the positions of two points on the Earth's surface, each specified by two geographic coordinates: the longitude and latitude of the point.
-
-***
-
-## Configuration
-
-### First Longitude
-The geographic coordinate that specifies the east-west position of the first point on the Earth's surface.
-
-### First Latitude
-The geographic coordinate that specifies the north-south position of the first point on the Earth's surface.
-
-### Second Longitude
-The geographic coordinate that specifies the east-west position of the second point on the Earth's surface.
-
-### Second Latitude
-The geographic coordinate that specifies the north-south position of the second point on the Earth's surface.
-
-## Output
-{
-  'distance': 12.2
-}
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.speed.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.speed.md
deleted file mode 100644
index fd5cfe80e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.speed.md
+++ /dev/null
@@ -1,56 +0,0 @@
----
-id: org.apache.streampipes.processors.geo.jvm.processor.speed
-title: Speed Calculator
-sidebar_label: Speed Calculator
-original_id: org.apache.streampipes.processors.geo.jvm.processor.speed
----
-
-
-
-
-
-***
-
-## Description
-
-Calculates the speed (in km/h) based on latitude/longitude values in a data stream. It uses the GPS coordinates and timestamps of consecutive events.
-It calculates the distance between two points (events) and how much time has passed. Based on those values the speed is calculated.
-
-***
-
-## Required input
-
-Requires a data stream that provides latitude and longitude values as well as a timestamp.
-
-***
-
-## Configuration
-
-### Timestamp field
-
-### Latitude field
-
-### Longitude field
-
-### Count window
-Describes the number of stored events used for the calculation.
-E.g. a value of 5 means that the current event and the event (t-5) are used for the speed calculation.
-
-## Output
-Appends the calculated speed in km/h.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator.md
deleted file mode 100644
index f44002ced..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator.md
+++ /dev/null
@@ -1,71 +0,0 @@
----
-id: org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator
-title: Static Distance Calculator
-sidebar_label: Static Distance Calculator
-original_id: org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator
----
-
-
-
-
-
-***
-
-## Description
-
-Calculates the distance (in km) between a fixed location (e.g., a place) and a latitude/longitude pair of an input event.
-
-***
-
-## Required input
-
-Requires a data stream that provides latitude and longitude values.
-
-***
-
-## Configuration
-
-### Latitude field
-
-The field containing the latitude value.
-
-### Longitude field
-
-The field containing the longitude value.
-
-### Latitude
-
-The latitude value of the fixed location.
-
-### Longitude
-
-The longitude value of the fixed location.
-
-## Output
-
-Outputs a similar event like below.
-
-```
-{
-  'distance': 12.5
-}
-```
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.absence.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.absence.md
deleted file mode 100644
index bebd7623b..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.absence.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-id: org.apache.streampipes.processors.pattern-detection.flink.absence
-title: Absence
-sidebar_label: Absence
-original_id: org.apache.streampipes.processors.pattern-detection.flink.absence
----
-
-
-
-
-
-
-***
-
-## Description
-
-Detects whether an event does not arrive within a specified time after the occurrence of another event.
-
-***
-
-## Required input
-
-
-***
-
-## Configuration
-
-Describe the configuration parameters here
-
-### 1st parameter
-
-
-### 2nd parameter
-
-## Output
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.and.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.and.md
deleted file mode 100644
index 68d03c174..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.and.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-id: org.apache.streampipes.processors.pattern-detection.flink.and
-title: And
-sidebar_label: And
-original_id: org.apache.streampipes.processors.pattern-detection.flink.and
----
-
-
-
-
-
-
-***
-
-## Description
-
-Detects whether an event co-occurs with another event within a given time.
-
-***
-
-## Required input
-
-
-***
-
-## Configuration
-
-Describe the configuration parameters here
-
-### 1st parameter
-
-
-### 2nd parameter
-
-## Output
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.peak-detection.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.peak-detection.md
deleted file mode 100644
index 4c003114c..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.peak-detection.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-id: org.apache.streampipes.processors.pattern-detection.flink.peak-detection
-title: Peak Detection
-sidebar_label: Peak Detection
-original_id: org.apache.streampipes.processors.pattern-detection.flink.peak-detection
----
-
-
-
-
-
-
-***
-
-## Description
-
-Detects peaks in time series data.
-
-***
-
-## Required input
-
-
-***
-
-## Configuration
-
-Describe the configuration parameters here
-
-### 1st parameter
-
-
-### 2nd parameter
-
-## Output
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.sequence.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.sequence.md
deleted file mode 100644
index 4605707a8..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.pattern-detection.flink.sequence.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-id: org.apache.streampipes.processors.pattern-detection.flink.sequence
-title: Sequence
-sidebar_label: Sequence
-original_id: org.apache.streampipes.processors.pattern-detection.flink.sequence
----
-
-
-
-
-
-
-***
-
-## Description
-
-Detects a sequence of events in the following form: Event A followed by Event B within X seconds. In addition, both streams can be matched by a common property value (e.g., a.machineId = b.machineId).
-
-***
-
-## Required input
-
-
-***
-
-## Configuration
-
-Describe the configuration parameters here
-
-### 1st parameter
-
-
-### 2nd parameter
-
-## Output
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.frequency.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.frequency.md
deleted file mode 100644
index e68ae4143..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.frequency.md
+++ /dev/null
@@ -1,56 +0,0 @@
----
-id: org.apache.streampipes.processors.siddhi.frequency
-title: Frequency Calculation
-sidebar_label: Frequency Calculation
-original_id: org.apache.streampipes.processors.siddhi.frequency
----
-
-
-
-
-
-Frequency calculation with the Siddhi CEP engine.
-
-***
-
-## Description
-
-Calculates the frequency of the event stream.
-
-***
-
-## Required input
-
-Does not have any specific input requirements.
-
-***
-
-## Configuration
-
-### Time Unit
-
-The time unit of the window, e.g., hrs, min and sec.
-
-### Time window length
-
-The time duration of the window in seconds.
-
-## Output
-
-Outputs the events according to the frequency specified.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.frequencychange.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.frequencychange.md
deleted file mode 100644
index e4a99cd7e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.frequencychange.md
+++ /dev/null
@@ -1,59 +0,0 @@
----
-id: org.apache.streampipes.processors.siddhi.frequencychange
-title: Frequency Change
-sidebar_label: Frequency Change
-original_id: org.apache.streampipes.processors.siddhi.frequencychange
----
-
-
-
-
-Notifies if there is a frequency change in events.
-
-***
-
-## Description
-
-Detects when the frequency of the event stream changes.
-
-***
-
-## Required input
-
-Does not have any specific input requirements.
-
-***
-
-## Configuration
-
-### Time Unit
-
-The time unit of the window, e.g., hrs, min and sec.
-
-### Percentage of Increase/Decrease
-
-Specifies the increase in percent (e.g., 100 indicates an increase by 100 percent within the specified time window).
-
-### Time window length
-
-The time duration of the window in seconds.
-
-## Output
-
-Outputs an event if there is a frequency change according to the provided configuration.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.increase.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.increase.md
deleted file mode 100644
index 64e3204a0..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.increase.md
+++ /dev/null
@@ -1,66 +0,0 @@
----
-id: org.apache.streampipes.processors.siddhi.increase
-title: Trend
-sidebar_label: Trend
-original_id: org.apache.streampipes.processors.siddhi.increase
----
-
-
-
-
-
-
-***
-
-## Description
-
-Detects the increase of a numerical field over a customizable time window. Example: A temperature value increases by 10 percent within 5 minutes.
-
-***
-
-## Required input
-
-There should be a number field in the event to observe the trend.
-
-***
-
-## Configuration
-
-### Value to Observe
-
-Specifies the value field that should be monitored.
-
-### Increase/Decrease
-
-Specifies the type of operation the processor should perform.
-
-### Percentage of Increase/Decrease
-
-Specifies the increase in percent (e.g., 100 indicates an increase by 100 percent within the specified time window).
-
-### Time Window Length (Seconds)
-
-Specifies the size of the time window in seconds.
-
-## Output
-
-Outputs the events if a trend is observed according to the defined configuration.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.numericalfilter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.numericalfilter.md
deleted file mode 100644
index 4d84b5069..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.numericalfilter.md
+++ /dev/null
@@ -1,64 +0,0 @@
----
-id: org.apache.streampipes.processors.siddhi.numericalfilter
-title: Numerical Filter (Siddhi)
-sidebar_label: Numerical Filter (Siddhi)
-original_id: org.apache.streampipes.processors.siddhi.numericalfilter
----
-
-
-
-
-
-
-***
-
-## Description
-The Numerical Filter processor filters numerical values based on a given threshold. Therefore, it uses the lightweight
-CEP engine Siddhi by issuing a Siddhi query, e.g.
-
-```
-// filter query to filter out all events not satisfying the condition
-from inputStreamName[numberField<10]
-select *
-```
-
-***
-
-## Required input
-The processor works with any input event that has one field containing a numerical value.
-
-***
-
-## Configuration
-
-### Field
-Specifies the field name where the filter operation should be applied on.
-
-### Operation
-Specifies the filter operation that should be applied on the field.
-
-### Threshold value
-Specifies the threshold value.
-
-## Output
-The processor outputs the input event if it satisfies the filter expression.
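-
-**Example (illustrative; the field name is hypothetical):**
-
-With the query above (`numberField < 10`):
-
-Input: `(numberField: 5)` --> event is forwarded
-
-Input: `(numberField: 12)` --> event is dropped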
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.sequence.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.sequence.md
deleted file mode 100644
index 994baf584..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.sequence.md
+++ /dev/null
@@ -1,36 +0,0 @@
----
-id: org.apache.streampipes.processors.siddhi.sequence
-title: Sequence Detection
-sidebar_label: Sequence Detection
-original_id: org.apache.streampipes.processors.siddhi.sequence
----
-
-
-
-
-
-
-***
-
-## Description
-
-Merges events from two event streams when the top event arrives first and then the bottom event.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.stop.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.stop.md
deleted file mode 100644
index 7d4ce23c8..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.siddhi.stop.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-id: org.apache.streampipes.processors.siddhi.stop
-title: Stream Stop Detection
-sidebar_label: Stream Stop Detection
-original_id: org.apache.streampipes.processors.siddhi.stop
----
-
-
-
-
-
-***
-
-## Description
-
-Triggers an event when the input data stream stops sending events.
-
-***
-
-## Required input
-
-Does not have any specific input requirements.
-
-***
-
-## Configuration
-
-### Time Window Length (Seconds)
-
-Specifies the size of the time window in seconds.
-
-## Output
-
-Outputs a similar event like below.
-
-```
-{
-  'timestamp': 1621243855401,
-  'message': 'Event stream has stopped'
-}
-```
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.statistics.flink.statistics-summary.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.statistics.flink.statistics-summary.md
deleted file mode 100644
index d989db1ce..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.statistics.flink.statistics-summary.md
+++ /dev/null
@@ -1,44 +0,0 @@
----
-id: org.apache.streampipes.processors.statistics.flink.statistics-summary
-title: Statistics Summary
-sidebar_label: Statistics Summary
-original_id: org.apache.streampipes.processors.statistics.flink.statistics-summary
----
-
-
-
-
-
-
-***
-
-## Description
-
-Calculates simple descriptive summary statistics for each selected list property.
-
-The statistics contain:
-* Minimum
-* Maximum
-* Sum
-* Standard Deviation
-* Variance
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.flink.languagedetection.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.flink.languagedetection.md
deleted file mode 100644
index ca74d0f3c..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.flink.languagedetection.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-id: org.apache.streampipes.processors.textmining.flink.languagedetection
-title: Language Detection
-sidebar_label: Language Detection
-original_id: org.apache.streampipes.processors.textmining.flink.languagedetection
----
-
-
-
-
-
-
-***
-
-## Description
-
-Detects the language of a written text.
-
-***
-
-## Required input
-
-
-***
-
-## Configuration
-
-Describe the configuration parameters here
-
-### 1st parameter
-
-
-### 2nd parameter
-
-## Output
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.flink.wordcount.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.flink.wordcount.md
deleted file mode 100644
index f965a1e4f..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.flink.wordcount.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-id: org.apache.streampipes.processors.textmining.flink.wordcount
-title: Word Count
-sidebar_label: Word Count
-original_id: org.apache.streampipes.processors.textmining.flink.wordcount
----
-
-
-
-
-
-
-***
-
-## Description
-
-Counts words in continuous text-based streams.
-
-***
-
-## Required input
-
-
-***
-
-## Configuration
-
-Describe the configuration parameters here
-
-### 1st parameter
-
-
-### 2nd parameter
-
-## Output
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.chunker.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.chunker.md
deleted file mode 100644
index a1ef4e505..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.chunker.md
+++ /dev/null
@@ -1,70 +0,0 @@
----
-id: org.apache.streampipes.processors.textmining.jvm.chunker
-title: Chunker (English)
-sidebar_label: Chunker (English)
-original_id: org.apache.streampipes.processors.textmining.jvm.chunker
----
-
-
-
-
-
-
-***
-
-## Description
-
-Segments given tokens into chunks (e.g. noun groups, verb groups, ...) and appends the found chunks to the stream.
-
-***
-
-## Required input
-
-Needs a stream with two string list properties:
-1. A list of tokens
-2. A list of part-of-speech tags (the Part-of-Speech processing element can be used for that)
-
-***
-
-## Configuration
-
-Assign the tokens and the part-of-speech tags to the corresponding stream property.
-To use this component you have to download or train an openNLP model:
-https://opennlp.apache.org/models.html
-
-## Output
-
-**Example:**
-
-Input:
-```
-tokens: ["John", "is", "a", "Person"]
-tags: ["NNP", "VBZ", "DT", "NN"]
-```
-
-Output:
-```
-tokens: ["John", "is", "a", "Person"]
-tags: ["NNP", "VBZ", "DT", "NN"]
-chunks: ["John", "is", "a Person"]
-chunkType: ["NP", "VP", "NP"]
-```
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.languagedetection.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.languagedetection.md
deleted file mode 100644
index 0d05118cb..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.languagedetection.md
+++ /dev/null
@@ -1,170 +0,0 @@
----
-id: org.apache.streampipes.processors.textmining.jvm.languagedetection
-title: Language Detection
-sidebar_label: Language Detection
-original_id: org.apache.streampipes.processors.textmining.jvm.languagedetection
----
-
-
-
-
-
-
-***
-
-## Description
-
-Detects the language of incoming text. For a proper detection each text should contain at least 2 sentences.
-
-Supported languages:
-* Afrikaans (afr)
-* Arabic (ara)
-* Asturian (ast)
-* Azerbaijani (aze)
-* Bashkir (bak)
-* Belarusian (bel)
-* Bengali (ben)
-* Bosnian (bos)
-* Breton (bre)
-* Bulgarian (bul)
-* Catalan (cat)
-* Cebuano (ceb)
-* Czech (ces)
-* Chechen (che)
-* Mandarin Chinese (cmn)
-* Welsh (cym)
-* Danish (dan)
-* German (deu)
-* Standard Estonian (ekk)
-* Greek, Modern (ell)
-* English (eng)
-* Esperanto (epo)
-* Estonian (est)
-* Basque (eus)
-* Faroese (fao)
-* Persian (fas)
-* Finnish (fin)
-* French (fra)
-* Western Frisian (fry)
-* Irish (gle)
-* Galician (glg)
-* Swiss German (gsw)
-* Gujarati (guj)
-* Hebrew (heb)
-* Hindi (hin)
-* Croatian (hrv)
-* Hungarian (hun)
-* Armenian (hye)
-* Indonesian (ind)
-* Icelandic (isl)
-* Italian (ita)
-* Javanese (jav)
-* Japanese (jpn)
-* Kannada (kan)
-* Georgian (kat)
-* Kazakh (kaz)
-* Kirghiz (kir)
-* Korean (kor)
-* Latin (lat)
-* Latvian (lav)
-* Limburgan (lim)
-* Lithuanian (lit)
-* Luxembourgish (ltz)
-* Standard Latvian (lvs)
-* Malayalam (mal)
-* Marathi (mar)
-* Minangkabau (min)
-* Macedonian (mkd)
-* Maltese (mlt)
-* Mongolian (mon)
-* Maori (mri)
-* Malay (msa)
-* Min Nan Chinese (nan)
-* Low German (nds)
-* Nepali (nep)
-* Dutch (nld)
-* Norwegian Nynorsk (nno)
-* Norwegian Bokmål (nob)
-* Occitan (oci)
-* Panjabi (pan)
-* Iranian Persian (pes)
-* Plateau Malagasy (plt)
-* Western Panjabi (pnb)
-* Polish (pol)
-* Portuguese (por)
-* Pushto (pus)
-* Romanian (ron)
-* Russian (rus)
-* Sanskrit (san)
-* Sinhala (sin)
-* Slovak (slk)
-* Slovenian (slv)
-* Somali (som)
-* Spanish (spa)
-* Albanian (sqi)
-* Serbian (srp)
-* Sundanese (sun)
-* Swahili (swa)
-* Swedish (swe)
-* Tamil (tam)
-* Tatar (tat)
-* Telugu (tel)
-* Tajik (tgk)
-* Tagalog (tgl)
-* Thai (tha)
-* Turkish (tur)
-* Ukrainian (ukr)
-* Urdu (urd)
-* Uzbek (uzb)
-* Vietnamese (vie)
-* Volapük (vol)
-* Waray (war)
-* Zulu (zul)
-
-***
-
-## Required input
-
-A stream with a string property which contains a text.
-The longer the text, the higher the accuracy of the language detector.
-
-
-***
-
-## Configuration
-
-Simply assign the correct output of the previous stream to the language detector input.
-To use this component you have to download or train an openNLP model:
-https://opennlp.apache.org/models.html
-
-## Output
-
-Adds two fields to the event:
-1. String Property: The acronym of the detected language which can be seen above.
-2. Double Property: The confidence of the detector that it found the correct language. Between 0 (not certain at all) and 1 (very certain).
-
-
-**Example:**
-
-Input: `(text: "Hi, how are you?")`
-
-Output: `(text: "Hi, how are you?", language: "eng", confidenceLanguage: 0.89)`
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.namefinder.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.namefinder.md
deleted file mode 100644
index f538d7f4d..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.namefinder.md
+++ /dev/null
@@ -1,66 +0,0 @@
----
-id: org.apache.streampipes.processors.textmining.jvm.namefinder
-title: Name Finder
-sidebar_label: Name Finder
-original_id: org.apache.streampipes.processors.textmining.jvm.namefinder
----
-
-
-
-
-
-
-***
-
-## Description
-
-Loads a trained model which finds names like locations or organizations.
-
-A list of trained models can be found here: http://opennlp.sourceforge.net/models-1.5/.\
-A guide on how to train a new model can be found here: https://opennlp.apache.org/docs/1.9.1/manual/opennlp.html#tools.namefind.training.
-
-***
-
-## Required input
-
-A stream with a list of tokens from a text.
-
-***
-
-## Configuration
-
-Configure the Name Finder so that the tokens are assigned to the "List of Tokens" property.
-
-#### Model parameter
-
-The trained model which should be used to find the names.
-
-## Output
-
-Appends a string list property to the stream which contains all found names.
-
-**Example (with a loaded English person-name model):**
-
-Input: `(tokens: ["Hi", "John", "Doe", "is", "here"])`
-
-Output: `(tokens: ["Hi", "John", "Doe", "is", "here"], foundNames: ["John Doe"])`
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.partofspeech.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.partofspeech.md
deleted file mode 100644
index f71ba21fc..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.partofspeech.md
+++ /dev/null
@@ -1,63 +0,0 @@
----
-id: org.apache.streampipes.processors.textmining.jvm.partofspeech
-title: Part of Speech (English)
-sidebar_label: Part of Speech (English)
-original_id: org.apache.streampipes.processors.textmining.jvm.partofspeech
----
-
-
-
-
-
-
-***
-
-## Description
-
-Takes in a stream of tokens and marks each token with a part-of-speech tag.
-The list of used part-of-speech tags can be found [here](https://www.ling.upenn.edu/courses/Fall_2003/ling001/penn_treebank_pos.html)
-
-***
-
-## Required input
-
-A stream with a list property which contains the tokens.
-
-***
-
-## Configuration
-
-Simply assign the correct output of the previous stream to the part-of-speech detector input.
-To use this component you have to download or train an openNLP model:
-https://opennlp.apache.org/models.html
-
-## Output
-
-Appends two list properties to the stream:
-1. String list: The tag for each token
-2. Double list: The confidence for each tag that it is indeed the given tag (between 0 and 1)
-
-**Example:**
-
-Input: `(tokens: ["Hi", "Joe"])`
-
-Output: `(tokens: ["Hi", "Joe"], tags: ["UH", "NNP"], confidence: [0.82, 0.87])`
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection.md
deleted file mode 100644
index a5ca7aa0e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection.md
+++ /dev/null
@@ -1,60 +0,0 @@
----
-id: org.apache.streampipes.processors.textmining.jvm.sentencedetection
-title: Sentence Detection (English)
-sidebar_label: Sentence Detection (English)
-original_id: org.apache.streampipes.processors.textmining.jvm.sentencedetection
----
-
-
-
-
-
-
-***
-
-## Description
-
-Detects sentences in a text and splits the text accordingly. Only works with English sentences.
-
-***
-
-## Required input
-
-A stream with a string property which contains a text.
-
-***
-
-## Configuration
-
-Simply assign the correct output of the previous stream to the sentence detector input.
-To use this component you have to download or train an openNLP model:
-https://opennlp.apache.org/models.html
-
-## Output
-
-Creates for each sentence in a text a new event in which it replaces the text with the sentence.
-
-**Example:**
-
-Input: `(text: "Hi, how are you? I am fine!")`
-
-Output: `(text: "Hi, how are you?")`, `(text: "I am fine!")`
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.tokenizer.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.tokenizer.md
deleted file mode 100644
index c7891e35f..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.textmining.jvm.tokenizer.md
+++ /dev/null
@@ -1,60 +0,0 @@
----
-id: org.apache.streampipes.processors.textmining.jvm.tokenizer
-title: Tokenizer (English)
-sidebar_label: Tokenizer (English)
-original_id: org.apache.streampipes.processors.textmining.jvm.tokenizer
----
-
-
-
-
-
-
-***
-
-## Description
-
-Segments a given text into tokens (usually words, numbers, punctuation, ...). Works best with English text.
-
-***
-
-## Required input
-
-A stream with a string property which contains a text.
-
-***
-
-## Configuration
-
-Simply assign the correct output of the previous stream to the tokenizer input.
-To use this component you have to download or train an openNLP model:
-https://opennlp.apache.org/models.html
-
-## Output
-
-Adds a list to the stream which contains all tokens of the corresponding text.
-
-**Example:**
-
-Input: `(text: "Hi, how are you?")`
-
-Output: `(text: "Hi, how are you?", tokens: ["Hi", ",", "how", "are", "you", "?"])`
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-converter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-converter.md
deleted file mode 100644
index c577f1297..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-converter.md
+++ /dev/null
@@ -1,55 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.flink.field-converter
-title: Field Converter
-sidebar_label: Field Converter
-original_id: org.apache.streampipes.processors.transformation.flink.field-converter
----
-
-
-
-
-
-
-***
-
-## Description
-
-Converts a string value to a number data type.
-
-
-***
-
-## Required input
-This processor requires an event that contains at least one string valued field.
-
-***
-
-## Configuration
-
-### Field
-Specifies the string field that is converted.
-
-### Datatype
-Specifies the target datatype depending on the previously specified string field.
-
-## Output
-Output event in the specified target datatype.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-mapper.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-mapper.md
deleted file mode 100644
index 47f5174b6..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-mapper.md
+++ /dev/null
@@ -1,70 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.flink.field-mapper
-title: Field Mapper
-sidebar_label: Field Mapper
-original_id: org.apache.streampipes.processors.transformation.flink.field-mapper
----
-
-
-
-
-
-
-***
-
-## Description
-
-Replaces one or more fields with a new field and computes a hash value of these fields.
-
-***
-
-## Configuration
-
-* Fields: Fields that will be mapped into a property
-* Name of the new field
-
-***
-
-## Example
-Merge two fields into a hash value
-### Input event
-```
-{
-  "timestamp":1586380104915,
-  "mass_flow":4.3167,
-  "temperature":40.05,
-  "sensorId":"flowrate01"
-}
-```
-
-### Configuration
-* Fields: mass_flow, temperature
-* Name of new field: demo
-
-### Output event
-```
-{
-  "timestamp":1586380104915,
-  "sensorId":"flowrate01",
-  "demo":"8ae11f5c83610104408d485b73120832"
-}
-```
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-renamer.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-renamer.md
deleted file mode 100644
index f879a7104..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.field-renamer.md
+++ /dev/null
@@ -1,69 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.flink.field-renamer
-title: Field Renamer
-sidebar_label: Field Renamer
-original_id: org.apache.streampipes.processors.transformation.flink.field-renamer
----
-
-
-
-
-
-
-***
-
-## Description
-
-Replaces the runtime name of an event property with a custom defined name. Useful for data ingestion purposes where a specific event schema is required.
-
-
-***
-
-## Required input
-There is no specific input required.
-
-***
-
-## Configuration
-
-### Field
-Specifies the field to rename.
-
-### NewFieldName
-Specifies the new runtime name of the field.
-
-## Output
-Example:
-
-Old output:
-```
-{
-  'timestamp': 16003000
-}
-```
-
-New output:
-```
-{
-  'time': 16003000
-}
-```
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.fieldhasher.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.fieldhasher.md
deleted file mode 100644
index b2540be32..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.fieldhasher.md
+++ /dev/null
@@ -1,55 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.flink.fieldhasher
-title: Field Hasher
-sidebar_label: Field Hasher
-original_id: org.apache.streampipes.processors.transformation.flink.fieldhasher
----
-
-
-
-
-
-
-***
-
-## Description
-
-The Field Hasher uses an algorithm to encode values in a field. The Field Hasher can use MD5, SHA1 or SHA2 to hash field values.
-
-***
-
-## Required input
-This processor requires at least one field of type string.
-
-***
-
-## Configuration
-
-### Field
-Specifies the string field that will be encoded.
-
-### Hash Algorithm
-Specifies the algorithm used to encode the string field. The following algorithms
-are available: SHA2, MD5 or SHA1.
-
-## Output
-The encoded string field.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.measurement-unit-converter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.measurement-unit-converter.md
deleted file mode 100644
index c498aae77..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.measurement-unit-converter.md
+++ /dev/null
@@ -1,54 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.flink.measurement-unit-converter
-title: Measurement Unit Converter
-sidebar_label: Measurement Unit Converter
-original_id: org.apache.streampipes.processors.transformation.flink.measurement-unit-converter
----
-
-
-
-
-
-
-***
-
-## Description
-
-Converts a unit of measurement to another one.
-
-***
-
-## Required input
-
-
-***
-
-## Configuration
-
-Describe the configuration parameters here
-
-### 1st parameter
-
-
-### 2nd parameter
-
-## Output
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.processor.boilerplate.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.processor.boilerplate.md
deleted file mode 100644
index 998c90ccf..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.flink.processor.boilerplate.md
+++ /dev/null
@@ -1,50 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.flink.processor.boilerplate
-title: Boilerplate Removal
-sidebar_label: Boilerplate Removal
-original_id: org.apache.streampipes.processors.transformation.flink.processor.boilerplate
----
-
-
-
-
-
-
-***
-
-## Description
-
-Removes boilerplate tags from HTML and extracts the fulltext.
-
-***
-
-## Required input
-Requires a text field containing the HTML.
-
-***
-
-## Configuration
-
-Select the extractor type and output mode.
-
-## Output
-Appends a new text field containing the content of the HTML page without the boilerplate.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter.md
deleted file mode 100644
index e53465c05..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter.md
+++ /dev/null
@@ -1,65 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.booloperator.counter
-title: Boolean Counter
-sidebar_label: Boolean Counter
-original_id: org.apache.streampipes.processors.transformation.jvm.booloperator.counter
----
-
-
-
-
-
-
-***
-
-## Description
-
-This processor monitors a boolean value and counts how often the value of the boolean changes.
-A user can configure whether the changes from FALSE to TRUE, TRUE to FALSE, or BOTH changes should be counted.
-
-***
-
-## Required input
-
-A boolean value is required in the data stream and can be selected with the field mapping.
-
-### Boolean Field
-
-The boolean value to be monitored.
-
-***
-
-## Configuration
-
-A user can configure whether the changes from TRUE to FALSE, FALSE to TRUE, or all changes of the boolean value should be counted.
-
-### Flank parameter
-
-Either:
-* TRUE -> FALSE: Increase counter on a true followed by a false
-* FALSE -> TRUE: Increase counter on a false followed by a true
-* BOTH: Increase counter on each change of the boolean value on two consecutive events
-
-## Output
-
-Adds an additional numerical field with the current count value to the event. Events are only emitted when the counter changes.
-Runtime Name: countField
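-
-**Example (illustrative; the monitored field name is hypothetical):**
-
-With flank parameter FALSE -> TRUE, the event sequence `(detected: false)`, `(detected: true)` emits:
-
-`(detected: true, countField: 1)`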
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter.md
deleted file mode 100644
index 3d9110cbb..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter.md
+++ /dev/null
@@ -1,52 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.booloperator.inverter
-title: Boolean Inverter
-sidebar_label: Boolean Inverter
-original_id: org.apache.streampipes.processors.transformation.jvm.booloperator.inverter
----
-
-
-
-
-
-
-***
-
-## Description
-
-This processor requires a boolean value in the data stream and inverts its value (e.g. true -> false).
-
-***
-
-## Required input
-
-### Boolean Field
-
-The boolean value to be inverted.
-
-***
-
-## Configuration
-No further configuration required
-
-## Output
-The output schema is the same as the input schema. Just the value of the property is changed.
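-
-**Example (illustrative; the field name is hypothetical):**
-
-Input: `(running: true)`
-
-Output: `(running: false)`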
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping.md
deleted file mode 100644
index bc5f36cfc..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping.md
+++ /dev/null
@@ -1,71 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping
-title: Measure Time Between Two Sensors
-sidebar_label: Measure Time Between Two Sensors
-original_id: org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping
----
-
-
-
-
-
-
-***
-
-## Description
-
-This processor can be used to measure the time between two boolean sensors.
-For example on a conveyor, where one sensor is placed on the left and one sensor is placed on the right.
-Parts are transported on the conveyor and the sensors are boolean sensors detecting those parts.
-The time is measured between the two sensors and the number of completed transports is counted.
-The measurement is initialized once the left sensor is true and stopped once the right sensor is true.
-There can also be multiple parts on the conveyor as long as the individual parts do not change.
-
-
-
-
-
-
-***
-
-## Required input
-Requires two boolean fields in the data stream.
-
-### Left Field
-The left field starts the timer when its value is true.
-
-### Right Field
-The right field stops the timer and emits the event when its value is true.
-
-***
-
-## Configuration
-No further configuration is required.
-
-## Output
-Appends two fields to the input event.
-
-### Timer Field
-The timer field is a numeric value representing the time between the two sensors. Runtime name: measured_time
-
-### Counter
-The counter indicates how many events were emitted by this component. Runtime name: counter
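-
-**Example (illustrative; sensor field names are hypothetical, and the time is assumed to be reported in milliseconds):**
-
-With `(leftSensor: true)` arriving and `(rightSensor: true)` arriving 5000 ms later, the emitted event could look like:
-
-`(rightSensor: true, measured_time: 5000, counter: 1)`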
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer.md
deleted file mode 100644
index a9c05e710..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.booloperator.timer
-title: Boolean Timer
-sidebar_label: Boolean Timer
-original_id: org.apache.streampipes.processors.transformation.jvm.booloperator.timer
----
-
-
-
-
-
-
-***
-
-## Description
-
-This processor measures how long a boolean value does not change. Once the value changes, the event with the measured time is emitted.
-
-
-***
-
-## Required input
-
-A boolean value is required in the data stream.
-
-### Field
-
-The boolean field which is monitored for state changes.
-
-***
-
-## Configuration
-
-### Timer value
-Defines whether to measure how long the value is true or how long the value is false.
-
-## Output
-Appends a field with the time for which the value did not change. The event is emitted when the boolean value changes. Runtime name: measured_time
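-
-**Example (illustrative; the field name and the time unit are assumptions):**
-
-With timer value `true`, a field `running` that stays true for 10 seconds and then turns false could emit:
-
-`(running: false, measured_time: 10000)`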
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.changed-value.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.changed-value.md
deleted file mode 100644
index 925452b1b..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.changed-value.md
+++ /dev/null
@@ -1,46 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.changed-value
-title: Value Changed
-sidebar_label: Value Changed
-original_id: org.apache.streampipes.processors.transformation.jvm.changed-value
----
-
-
-
-
-
-
-***
-
-## Description
-
-This processor sends out an event every time a specific object changes. It also adds a timestamp in ms from the system time.
-
-***
-
-## Configuration
-Select the property to monitor for changes.
-
-## Output
-Emits an event on change and appends a timestamp indicating when the change occurred.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.count-array.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.count-array.md
deleted file mode 100644
index cd7c5616c..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.count-array.md
+++ /dev/null
@@ -1,56 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.count-array
-title: Count Array
-sidebar_label: Count Array
-original_id: org.apache.streampipes.processors.transformation.jvm.count-array
----
-
-
-
-
-
-
-***
-
-## Description
-
-This processor takes a list field, computes the size of the list and appends the result to the event.
-
-***
-
-## Required input
-
-This processor works with any event that has a field of type ``list``.
-
-***
-
-## Configuration
-
-### List Field
-
-The field containing the list that should be used.
-
-## Output
-
-Outputs the incoming event while appending the list size (named ``countValue``) to the incoming event.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata.md
deleted file mode 100644
index 258acf9d5..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata.md
+++ /dev/null
@@ -1,77 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.csvmetadata
-title: CSV Metadata Enricher
-sidebar_label: CSV Metadata Enricher
-original_id: org.apache.streampipes.processors.transformation.jvm.csvmetadata
----
-
-
-
-
-Enriches a data stream with information provided in a CSV file.
-The data of the CSV file is matched by an id column against a string property value in the data stream.
-
-***
-
-## Description
-Upload a CSV file with static meta information that will be appended to each event.
-The file can contain different information for different keys in the stream.
-
-
-### Structure of CSV file
-The first row contains the runtime names for the properties to insert.
-Once the file is uploaded the user can select which column to use for the matching property and which values should be appended.
-Delimiter: ';'
-
-
-***
-
-## Example
-Add the location of a production line to the event
-
-### Input event
-```
-{
-  'line_id': 'line1',
-  'timestamp': 1586378041
-}
-```
-
-### CSV File
-```
-production_line;location
-line1;germany
-line2;uk
-line3;usa
-```
-
-### Configuration
-* The field that is used for the lookup (Example: line_id)
-* The CSV file (Example: Upload the csv file)
-* Field to match (Example: production_line)
-* Fields to append (Example: location)
-
-### Output event
-```
-{
-  'line_id': 'line1',
-  'timestamp': 1586378041,
-  'location': 'germany'
-}
-```
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.duration-value.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.duration-value.md
deleted file mode 100644
index 3ed23c240..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.duration-value.md
+++ /dev/null
@@ -1,52 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.duration-value
-title: Calculate Duration
-sidebar_label: Calculate Duration
-original_id: org.apache.streampipes.processors.transformation.jvm.duration-value
----
-
-
-
-
-
-
-***
-
-## Description
-
-This processor calculates the duration for a given stream with a start timestamp and an end timestamp.
-
-***
-
-## Required input
-Two timestamp fields
-
-***
-
-## Configuration
-
-* Start Timestamp: The first timestamp (t1)
-* End Timestamp: The second timestamp (t2)
-* Time Unit of the result
-
-## Output
-Appends a new field with the difference of t2 and t1.
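-
-**Example (illustrative; field names and the appended runtime name are hypothetical):**
-
-With time unit seconds:
-
-Input: `(start: 1586380104000, end: 1586380114000)`
-
-Output: `(start: 1586380104000, end: 1586380114000, duration: 10)`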
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge.md
deleted file mode 100644
index 8d6bc9ab6..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge.md
+++ /dev/null
@@ -1,58 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge
-title: Signal Edge Filter
-sidebar_label: Signal Edge Filter
-original_id: org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge
----
-
-
-
-
-
-
-***
-
-## Description
-
-Observes a boolean value and forwards the event when a signal edge is detected.
-
-***
-
-## Required input
-
-### Boolean Field
-Boolean field that is observed.
-
-***
-
-## Configuration
-### Kind of edge
-* Detect rising edges
-* Detect falling edges
-* Detect both
-
-### Delay
-Defines for how many events the signal must be stable before the result is emitted
-(e.g. if set to 2, the result is not emitted if the value toggles between true and false; it fires when two consecutive stable events are detected after the flank).
-
-## Output
-Emits the input event when the signal edge is detected.
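-
-**Example (illustrative; the field name is hypothetical):**
-
-With kind of edge "rising" and delay 0, the event sequence `(signal: false)`, `(signal: true)` forwards the second event.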
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state.md
deleted file mode 100644
index 9bbe79264..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state.md
+++ /dev/null
@@ -1,59 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state
-title: Boolean To State
-sidebar_label: Boolean To State
-original_id: org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state
----
-
-
-
-
-
-
-***
-
-## Description
-
-Converts boolean fields to a state string representing the current state of the system.
-This processor requires one or multiple boolean values in the data stream.
-For the selected value which is true, the runtime name is added as the state field.
-***
-
-## Required input
-
-### Boolean Fields
-Boolean fields that are converted to the state when true
-
-### Default State
-When all boolean values are false, a default state can be defined
-
-### Mapping Configuration
-Configuration to provide a string mapping for each possible value.
-On the left is the value of the runtime name and on the right the new value (e.g. {"runtimeName": "newValue"}).
-
-***
-
-## Configuration
-No further configuration required
-
-## Output
-The output contains a new value with the string values of the state
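-
-**Example (illustrative; field names are hypothetical):**
-
-Input: `(running: true, error: false)`
-
-Output: `(running: true, error: false, state: "running")`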
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.buffer.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.buffer.md
deleted file mode 100644
index 309e8b44a..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.buffer.md
+++ /dev/null
@@ -1,55 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.processor.state.buffer
-title: State Buffer
-sidebar_label: State Buffer
-original_id: org.apache.streampipes.processors.transformation.jvm.processor.state.buffer
----
-
-
-
-
-
-
-***
-
-## Description
-
-Buffers values of a sensor while the state does not change.
-Select a state field in the event. Events are buffered as long as the state field does not change. When it changes, a result event is emitted.
-
-***
-
-## Required input
-
-Define the state and sensor value field
-
-### Timestamp
-A mapping property for a timestamp field
-
-### State
-Select the field representing the state
-
-### Sensor value to cache
-Select the field with the numerical values to buffer
-
-## Output
-Emits a new event on state change, with the fields `timestamp`, `state`, and a list containing all `sensor values`.
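-
-**Example (illustrative; field names are hypothetical):**
-
-Three events with state `op1` and sensor values 1.0, 2.0, 3.0, followed by an event with state `op2`, could emit:
-
-`(timestamp: 1621243855401, state: "op1", values: [1.0, 2.0, 3.0])`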
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.buffer.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.buffer.md
deleted file mode 100644
index 8c3c4e352..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.buffer.md
+++ /dev/null
@@ -1,70 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.buffer
-title: State Buffer Labeler
-sidebar_label: State Buffer Labeler
-original_id: org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.buffer
----
-
-
-
-
-
-
-***
-
-## Description
-
-Apply a rule to a time-series recorded during a state of a machine. (E.g. when the minimum value is lower than 10, add label `not ok`, else add label `ok`)
-
-
-***
-
-## Required input
-
-Requires a list with sensor values and a field defining the state
-
-### Sensor values
-
-An array representing sensor values recorded during the state.
-
-### State field
-
-A field representing the state when the sensor values were recorded.
-
-***
-
-## Configuration
-
-### Select a specific state
-When you are interested in the values of a specific state add it here. All other states will be ignored. To get results of all states enter `*`
-
-### Operation
-Operation that will be performed on the sensor values (calculate `maximum`, or `average`, or `minimum`)
-
-### Condition
-Define a rule which label to add. Example: `<;5;nok` means when the calculated value is smaller than 5, add label `nok`.
-The default label can be defined with `*;nok`.
-The first rule that is true defines the label. Rules are applied in the same order as defined here.
-
-
-## Output
-Appends a new field with the label defined in the Condition Configuration

- -
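-A small sketch of how the condition rules described below can be read: rules are
-checked in order against the sensor value, and the first match determines the
-label. The comparator set shown here is illustrative:
-
-```java
-public class NumberLabelerSketch {
-  public static String label(double value, String... rules) {
-    for (String rule : rules) {
-      String[] p = rule.split(";");
-      switch (p[0]) {
-        case "<": if (value < Double.parseDouble(p[1])) return p[2]; break;
-        case ">": if (value > Double.parseDouble(p[1])) return p[2]; break;
-        case "*": return p[1];  // default label, e.g. "*;ok"
-      }
-    }
-    return null;
-  }
-
-  public static void main(String[] args) {
-    System.out.println(label(3.0, "<;5;nok", "*;ok"));  // prints "nok"
-  }
-}
-```
-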

-
-***
-
-## Description
-
-Apply a rule to the value of a field. (E.g. when the value is lower than 10, add the label `not ok`, else add the label `ok`.)
-
-***
-
-## Required input
-
-Requires a sensor value.
-
-### Sensor value
-
-A number representing the current sensor value.
-
-***
-
-## Configuration
-
-### Condition
-Define a rule which label to add. Example: `<;5;nok` means: when the value is smaller than 5, add the label `nok`.
-The default label can be defined with `*;nok`.
-The first rule that is true defines the label. Rules are applied in the same order as defined here.
-
-
-## Output
-Appends a new field with the label defined in the Condition configuration.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state.md
deleted file mode 100644
index 5f533eb01..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state.md
+++ /dev/null
@@ -1,52 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state
-title: String To State
-sidebar_label: String To State
-original_id: org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state
----
-
-
-
-

- -
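-A minimal sketch of the behavior described below, assuming events as plain maps
-(field names are hypothetical):
-
-```java
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-
-public class StringToStateSketch {
-  // collects the values of the selected string fields into the state list
-  public static List<String> toState(Map<String, Object> event, List<String> selectedFields) {
-    return selectedFields.stream()
-        .map(field -> (String) event.get(field))
-        .collect(Collectors.toList());
-  }
-}
-```
-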

-
-***
-
-## Description
-
-Converts string fields to a state representing the current state of the system.
-This processor requires one or multiple string values in the data stream.
-Each of the selected string values is added to the state field.
-***
-
-## Required input
-
-### String Fields
-String fields that are added to the state array.
-
-***
-
-## Configuration
-No further configuration required.
-
-## Output
-The output contains a new field with the string values of the state.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor.md
deleted file mode 100644
index 5132b6017..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor.md
+++ /dev/null
@@ -1,59 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor
-title: Timestamp Extractor
-sidebar_label: Timestamp Extractor
-original_id: org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor
----
-
-
-
-

- -
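-A sketch of the extraction described below, assuming an epoch-millis timestamp
-and UTC (the processor's actual time-zone handling may differ):
-
-```java
-import java.time.Instant;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
-
-public class TimestampExtractorSketch {
-  public static void main(String[] args) {
-    long timestamp = 1720461680000L;  // hypothetical value of the timestamp field
-    ZonedDateTime time = Instant.ofEpochMilli(timestamp).atZone(ZoneOffset.UTC);
-    // the parts selected via "Extract Fields" become individual event fields
-    System.out.println("year=" + time.getYear()
-        + " day=" + time.getDayOfMonth()
-        + " hour=" + time.getHour()
-        + " minute=" + time.getMinute());
-  }
-}
-```
-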

-
-***
-
-## Description
-
-This processor extracts a timestamp into individual time fields (e.g., day, hour, minute).
-
-***
-
-## Required input
-
-This processor requires an event that provides a timestamp value (a field that is marked to be of type ``http://schema.org/DateTime``).
-
-***
-
-## Configuration
-
-### Timestamp Field
-
-The field of the event containing the timestamp to parse.
-
-### Extract Fields
-
-Select the individual parts of the timestamp that should be extracted, e.g., Year, Minute and Day.
-
-## Output
-
-The output of this processor is a new event that contains the fields selected by the ``Extract Fields`` parameter.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.split-array.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.split-array.md
deleted file mode 100644
index 864ddd673..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.split-array.md
+++ /dev/null
@@ -1,60 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.split-array
-title: Split Array
-sidebar_label: Split Array
-original_id: org.apache.streampipes.processors.transformation.jvm.split-array
----
-
-
-
-

- -
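-A hedged sketch of the splitting logic described below (the event representation
-and the name of the element field are assumptions):
-
-```java
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class SplitArraySketch {
-  public static List<Map<String, Object>> split(Map<String, Object> event,
-                                                String listField,
-                                                List<String> keepFields) {
-    List<Map<String, Object>> out = new ArrayList<>();
-    for (Object element : (List<?>) event.get(listField)) {
-      Map<String, Object> result = new HashMap<>();
-      keepFields.forEach(f -> result.put(f, event.get(f)));  // copy "Keep Fields"
-      result.put("value", element);  // one output event per list element
-      out.add(result);
-    }
-    return out;
-  }
-}
-```
-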

-
-***
-
-## Description
-
-This processor takes an array of event properties and creates an event for each of them. Further properties of the event can be added to each resulting event.
-
-***
-
-## Required input
-
-This processor works with any event that has a field of type ``list``.
-
-***
-
-## Configuration
-
-### Keep Fields
-
-Fields of the event that should be kept in each resulting event.
-
-### List field
-
-The name of the field that contains the list values that should be split.
-
-
-## Output
-
-This data processor produces an event with all fields selected by the ``Keep Fields`` parameter and all fields of the selected list field.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter.md
deleted file mode 100644
index f9d674dae..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter.md
+++ /dev/null
@@ -1,62 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.stringoperator.counter
-title: String Counter
-sidebar_label: String Counter
-original_id: org.apache.streampipes.processors.transformation.jvm.stringoperator.counter
----
-
-
-
-

- -
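-A minimal sketch of the pair counting described below: each change of the
-monitored field increments a counter keyed by the (before, after) pair
-(an illustration, not the actual implementation):
-
-```java
-import java.util.HashMap;
-import java.util.Map;
-
-public class StringCounterSketch {
-  private String lastValue;
-  private final Map<String, Integer> counters = new HashMap<>();
-
-  public void onValue(String value) {
-    if (lastValue != null && !lastValue.equals(value)) {
-      int count = counters.merge(lastValue + "->" + value, 1, Integer::sum);
-      System.out.println("change_from=" + lastValue
-          + " change_to=" + value + " counter=" + count);
-    }
-    lastValue = value;
-  }
-}
-```
-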

-
-***
-
-## Description
-
-This processor monitors a string field and counts how often its value changes. A change is characterized by
-the value of the field before and the value after the change, which together form a pair. The processor keeps track of a counter for each pair.
-
-***
-
-## Required input
-
-A string field is required in the data stream and can be selected with the field mapping.
-
-### String Field
-
-The string field to be monitored.
-
-***
-
-## Configuration
-
-(no further configuration required)
-
-## Output
-The following three fields are appended to the event:
-* [counter] numerical field with the current count value for the given value pair
-* [change_from] the value of the string before the change
-* [change_to] the value of the string after the change
-
-The event is emitted whenever the value of the string field changes.
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer.md
deleted file mode 100644
index cb1da0a1e..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer.md
+++ /dev/null
@@ -1,64 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.stringoperator.timer
-title: String Timer
-sidebar_label: String Timer
-original_id: org.apache.streampipes.processors.transformation.jvm.stringoperator.timer
----
-
-
-
-

- -
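-A sketch of the timing behavior described below, assuming each event carries a
-timestamp in milliseconds (an illustration, not the actual implementation):
-
-```java
-public class StringTimerSketch {
-  private String currentValue;
-  private long since;
-
-  public void onEvent(String value, long timestamp) {
-    if (currentValue == null) {
-      since = timestamp;                       // first observation
-    } else if (!currentValue.equals(value)) {
-      // value changed: emit how long the previous value was stable
-      System.out.println("field_value=" + currentValue
-          + " measured_time=" + (timestamp - since));
-      since = timestamp;
-    }
-    currentValue = value;
-  }
-}
-```
-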

-
-***
-
-## Description
-
-This processor measures how long the value of a string field remains unchanged. Once the value changes, an event with the measured time and the corresponding string value is emitted.
-
-
-***
-
-## Required input
-
-A string field is required in the data stream.
-
-### Field
-
-The string field which is monitored for any value changes.
-
-
-***
-
-## Configuration
-
-### Output Frequency
-
-Define when an event should be emitted, either on each input event or just when the string value changes.
-
-## Output
-The following two fields are appended to the event:
-* [measured_time] the time during which the string value did not change
-* [field_value] the corresponding string value
-
-The event is emitted whenever the value of the string field changes.
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.taskduration.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.taskduration.md
deleted file mode 100644
index 9bb2ff189..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.taskduration.md
+++ /dev/null
@@ -1,51 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.taskduration
-title: Task Duration
-sidebar_label: Task Duration
-original_id: org.apache.streampipes.processors.transformation.jvm.taskduration
----
-
-
-
-
-
-***
-
-## Description
-
-This processor computes the duration of a task, i.e., a field containing a task description. It outputs an event
- every time this task value changes and computes the duration between the first occurrence of this task and the
- current event. For instance, you can use this event to calculate the time a specific process step requires.
-***
-
-## Required input
-
-A timestamp value and a field containing a task value are required.
-
-***
-
-## Configuration
-
-(no further configuration required)
-
-## Output
-
-Emits an event that contains the process step, built from the names of the first task identifier and the identifier
- of the subsequent task. In addition, the duration is part of the output event, provided in milliseconds.
\ No newline at end of file
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean.md
deleted file mode 100644
index 0028a9212..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean.md
+++ /dev/null
@@ -1,51 +0,0 @@
----
-id: org.apache.streampipes.processors.transformation.jvm.transform-to-boolean
-title: Transform to boolean
-sidebar_label: Transform to boolean
-original_id: org.apache.streampipes.processors.transformation.jvm.transform-to-boolean
----
-
-
-
-

- -
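-A sketch of the value mapping described below (slightly simplified: the
-case-insensitive match here would also accept spellings such as "TRUE"):
-
-```java
-public class ToBooleanSketch {
-  public static Boolean toBoolean(Object value) {
-    String s = String.valueOf(value);
-    if (s.equalsIgnoreCase("true") || s.equals("1") || s.equals("1.0")) {
-      return Boolean.TRUE;
-    }
-    if (s.equalsIgnoreCase("false") || s.equals("0") || s.equals("0.0")) {
-      return Boolean.FALSE;
-    }
-    return null;  // invalid: the processor logs an error and discards the event
-  }
-}
-```
-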

-
-***
-
-## Description
-This processor transforms numbers and strings to boolean values.
-
-
-***
-
-## Required input
-A string with the values "true", "True", "false", "False", or a number with value 1.0, 1, 0, or 0.0.
-
-***
-
-## Configuration
-
-Select fields that should be converted to boolean.
-
-## Output
-Selected properties of input events are transformed to booleans.
-When a value is not valid, an error message is logged and the event is discarded.
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.protocol.set.file.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.protocol.set.file.md
deleted file mode 100644
index 37adf4d65..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.protocol.set.file.md
+++ /dev/null
@@ -1,39 +0,0 @@
----
-id: org.apache.streampipes.protocol.set.file
-title: File Set
-sidebar_label: File Set
-original_id: org.apache.streampipes.protocol.set.file
----
-
-
-
-

- -

- -*** - -## Description - -Reads the content from a local file. - -*** - diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.protocol.set.http.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.protocol.set.http.md deleted file mode 100644 index 8f316fe29..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.protocol.set.http.md +++ /dev/null @@ -1,39 +0,0 @@ ---- -id: org.apache.streampipes.protocol.set.http -title: HTTP Set -sidebar_label: HTTP Set -original_id: org.apache.streampipes.protocol.set.http ---- - - - - - -

- -

-
-***
-
-## Description
-
-Regularly polls an HTTP endpoint.
-
-***
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest.md
deleted file mode 100644
index 401fe35e3..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest.md
+++ /dev/null
@@ -1,59 +0,0 @@
----
-id: org.apache.streampipes.sinks.brokers.jvm.bufferrest
-title: Buffered REST Publisher
-sidebar_label: Buffered REST Publisher
-original_id: org.apache.streampipes.sinks.brokers.jvm.bufferrest
----
-
-
-
-

- -
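-A hedged sketch of the buffering behavior described below, using the JDK HTTP
-client; the endpoint URL and buffer size are placeholders:
-
-```java
-import java.net.URI;
-import java.net.http.HttpClient;
-import java.net.http.HttpRequest;
-import java.net.http.HttpResponse;
-import java.util.ArrayList;
-import java.util.List;
-
-public class BufferedRestSketch {
-  private static final int BUFFER_SIZE = 10;  // "Buffer Size" parameter
-  private final List<String> buffer = new ArrayList<>();
-  private final HttpClient client = HttpClient.newHttpClient();
-
-  public void onJsonEvent(String json) throws Exception {
-    buffer.add(json);
-    if (buffer.size() >= BUFFER_SIZE) {
-      String array = "[" + String.join(",", buffer) + "]";  // JSON array of events
-      HttpRequest request = HttpRequest.newBuilder()
-          .uri(URI.create("http://localhost:8080/events"))  // "REST URL" parameter
-          .header("Content-Type", "application/json")
-          .POST(HttpRequest.BodyPublishers.ofString(array))
-          .build();
-      client.send(request, HttpResponse.BodyHandlers.discarding());
-      buffer.clear();
-    }
-  }
-}
-```
-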

-
-***
-
-## Description
-
-Collects a given number of events into a JSON array. Once this count is reached,
-the JSON array is posted to the given REST interface.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### REST URL
-
-The complete URL of the REST endpoint.
-
-### Buffer Size
-
-The number of events to collect before sending.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.jms.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.jms.md
deleted file mode 100644
index 61816d83b..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.jms.md
+++ /dev/null
@@ -1,61 +0,0 @@
----
-id: org.apache.streampipes.sinks.brokers.jvm.jms
-title: JMS Publisher
-sidebar_label: JMS Publisher
-original_id: org.apache.streampipes.sinks.brokers.jvm.jms
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Publishes events to a message broker (e.g., ActiveMQ) using the Java Message Service (JMS) protocol.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### JMS Broker Settings
-
-The basic settings to connect to the broker.
-The JMS broker URL indicates the URL of the broker (e.g., tcp://localhost); the port indicates the port of the broker
- (e.g., 61616).
-
-
-### JMS Topic
-
-The topic where events should be sent to.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.kafka.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.kafka.md
deleted file mode 100644
index 448b7c34a..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.kafka.md
+++ /dev/null
@@ -1,62 +0,0 @@
----
-id: org.apache.streampipes.sinks.brokers.jvm.kafka
-title: Kafka Publisher
-sidebar_label: Kafka Publisher
-original_id: org.apache.streampipes.sinks.brokers.jvm.kafka
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Publishes events to Apache Kafka.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### Kafka Broker Settings
-
-The basic settings to connect to the broker.
-The Kafka broker URL indicates the URL of the broker (e.g., localhost); the port indicates the port of the broker
- (e.g., 9092).
-
-
-### Kafka Topic
-
-The topic where events should be sent to.
-
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.mqtt.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.mqtt.md
deleted file mode 100644
index f4214ed01..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.mqtt.md
+++ /dev/null
@@ -1,62 +0,0 @@
----
-id: org.apache.streampipes.sinks.brokers.jvm.mqtt
-title: MQTT Publisher
-sidebar_label: MQTT Publisher
-original_id: org.apache.streampipes.sinks.brokers.jvm.mqtt
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Publishes events to an MQTT broker.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### MQTT Broker Settings
-
-The basic settings to connect to the broker.
-The MQTT broker URL indicates the URL of the broker (e.g., localhost); the port indicates the port of the broker
-(e.g., 1883).
-
-
-### MQTT Topic
-
-The topic where events should be sent to.
-
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.nats.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.nats.md
deleted file mode 100644
index cc91a51d4..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.nats.md
+++ /dev/null
@@ -1,79 +0,0 @@
----
-id: org.apache.streampipes.sinks.brokers.jvm.nats
-title: NATS Publisher
-sidebar_label: NATS Publisher
-original_id: org.apache.streampipes.sinks.brokers.jvm.nats
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Publishes events to a NATS broker.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### NATS Subject
-
-The subject (topic) where events should be sent to.
-
-### NATS Broker URL
-
-The URL to connect to the NATS broker. Multiple URLs can be provided, separated by commas (,)
- (e.g., nats://localhost:4222,nats://localhost:4223).
-
-### Username
-
-The username to authenticate the client with the NATS broker.
-
-This configuration is optional.
-
-### Password
-
-The password to authenticate the client with the NATS broker.
-
-This configuration is optional.
-
-### NATS Connection Properties
-
-All other connection configurations that the NATS client can be created with.
-They can be provided as key-value pairs separated by colons (:) and commas (,)
- (e.g., io.nats.client.reconnect.max:1, io.nats.client.timeout:1000).
-
-This configuration is optional.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.pulsar.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.pulsar.md
deleted file mode 100644
index fb97bbf1d..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.pulsar.md
+++ /dev/null
@@ -1,64 +0,0 @@
----
-id: org.apache.streampipes.sinks.brokers.jvm.pulsar
-title: Pulsar Publisher
-sidebar_label: Pulsar Publisher
-original_id: org.apache.streampipes.sinks.brokers.jvm.pulsar
----
-
-
-
-

- -

- -*** - -## Description - -Publishes events to Apache Pulsar. - -*** - -## Required input - -This sink does not have any requirements and works with any incoming event type. - -*** - -## Configuration - -### Pulsar Broker Hostname - -The hostname to connect to the broker. - -### Pulsar Broker Port - -The port to connect to the broker (e.g., 6650) - - -### Pulsar Topic - -The topic where events should be sent to. - - -## Output - -(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq.md deleted file mode 100644 index c0a34e27c..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq.md +++ /dev/null @@ -1,74 +0,0 @@ ---- -id: org.apache.streampipes.sinks.brokers.jvm.rabbitmq -title: RabbitMQ Publisher -sidebar_label: RabbitMQ Publisher -original_id: org.apache.streampipes.sinks.brokers.jvm.rabbitmq ---- - - - - - -

- -

- -*** - -## Description - -Forwards events to a RabbitMQ broker - -*** - -## Required input - -This sink does not have any requirements and works with any incoming event type. - -*** - -## Configuration - -### Host - -The hostname of the RabbitMQ broker. - -### Port - -The port of the RabbitMQ broker. - -### User - -The username used to connect to the RabbitMQ broker. - -### Password - -The password used to connect to the RabbitMQ broker. - -### Exchange Name - -The name of the exchange. - -### RabbitMQ Topic - -The topic where events should be sent to. - -## Output - -(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.rest.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.rest.md deleted file mode 100644 index 981c7e5b0..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.brokers.jvm.rest.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -id: org.apache.streampipes.sinks.brokers.jvm.rest -title: REST Publisher -sidebar_label: REST Publisher -original_id: org.apache.streampipes.sinks.brokers.jvm.rest ---- - - - - -

- -

- -*** - -## Description - -Posts a JSON representation of an event to a REST interface. - -*** - -## Required input - -This sink does not have any requirements and works with any incoming event type. - -*** - -## Configuration - -### REST URL - -The complete URL of the REST endpoint. - -## Output - -(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.ditto.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.ditto.md deleted file mode 100644 index 8a7089a16..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.ditto.md +++ /dev/null @@ -1,74 +0,0 @@ ---- -id: org.apache.streampipes.sinks.databases.ditto -title: Eclipse Ditto -sidebar_label: Eclipse Ditto -original_id: org.apache.streampipes.sinks.databases.ditto ---- - - - - - -

- -

-
-***
-
-## Description
-
-Forwards events to the Eclipse Ditto API.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### Fields to send
-
-The fields that should be stored as properties at the Ditto endpoint.
-
-### Ditto API endpoint
-
-The endpoint URL of the Ditto instance.
-
-### Username
-
-The username to authenticate against the Ditto endpoint.
-
-### Password
-
-The password to authenticate against the Ditto endpoint.
-
-### Thing ID
-
-The Ditto thing ID.
-
-### Feature ID
-
-The Ditto feature ID.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.flink.elasticsearch.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.flink.elasticsearch.md
deleted file mode 100644
index 5e29bbbb4..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.flink.elasticsearch.md
+++ /dev/null
@@ -1,61 +0,0 @@
----
-id: org.apache.streampipes.sinks.databases.flink.elasticsearch
-title: Elasticsearch
-sidebar_label: Elasticsearch
-original_id: org.apache.streampipes.sinks.databases.flink.elasticsearch
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Stores data in an Elasticsearch database.
-
-***
-
-## Required input
-
-This sink requires an event that provides a timestamp value (a field that is marked to be of type ``http://schema.org/DateTime``).
-
-***
-
-## Configuration
-
-### Timestamp Field
-
-The field which contains the required timestamp.
-
-### Index Name
-
-The name of the Elasticsearch index where events are stored to.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.couchdb.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.couchdb.md
deleted file mode 100644
index 4e528a92d..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.couchdb.md
+++ /dev/null
@@ -1,64 +0,0 @@
----
-id: org.apache.streampipes.sinks.databases.jvm.couchdb
-title: CouchDB
-sidebar_label: CouchDB
-original_id: org.apache.streampipes.sinks.databases.jvm.couchdb
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Stores events in an Apache CouchDB database.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### Hostname
-
-The hostname of the CouchDB instance.
-
-### Port
-
-The port of the CouchDB instance.
-
-### Database Name
-
-The name of the database where events will be stored.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.influxdb.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.influxdb.md
deleted file mode 100644
index 98e27a58b..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.influxdb.md
+++ /dev/null
@@ -1,86 +0,0 @@
----
-id: org.apache.streampipes.sinks.databases.jvm.influxdb
-title: InfluxDB
-sidebar_label: InfluxDB
-original_id: org.apache.streampipes.sinks.databases.jvm.influxdb
----
-
-
-
-

- -
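-A sketch of how the `Buffer Size` and `Maximum Flush` parameters described below
-interact: a write happens when the buffer is full or when the flush interval
-elapses, whichever comes first (values and the write call are placeholders, not
-the actual implementation):
-
-```java
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-public class BufferedWriteSketch {
-  private static final int BUFFER_SIZE = 100;     // "Buffer Size"
-  private static final long MAX_FLUSH_MS = 2000;  // "Maximum Flush"
-  private final List<String> buffer = new ArrayList<>();
-  private final ScheduledExecutorService scheduler =
-      Executors.newSingleThreadScheduledExecutor();
-
-  public BufferedWriteSketch() {
-    scheduler.scheduleAtFixedRate(this::flush, MAX_FLUSH_MS, MAX_FLUSH_MS, TimeUnit.MILLISECONDS);
-  }
-
-  public synchronized void add(String point) {
-    buffer.add(point);
-    if (buffer.size() >= BUFFER_SIZE) {
-      flush();
-    }
-  }
-
-  private synchronized void flush() {
-    if (!buffer.isEmpty()) {
-      System.out.println("writing " + buffer.size() + " points");  // stand-in for the DB write
-      buffer.clear();
-    }
-  }
-}
-```
-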

-
-***
-
-## Description
-
-Stores events in an InfluxDB.
-
-***
-
-## Required input
-
-This sink requires an event that provides a timestamp value (a field that is marked to be of type ``http://schema.org/DateTime``).
-
-***
-
-## Configuration
-
-### Hostname
-
-The hostname/URL of the InfluxDB instance (include http(s)://).
-
-### Port
-
-The port of the InfluxDB instance.
-
-### Database Name
-
-The name of the database where events will be stored.
-
-### Measurement Name
-
-The name of the measurement where events will be stored (will be created if it does not exist).
-
-### Username
-
-The username for the InfluxDB server.
-
-### Password
-
-The password for the InfluxDB server.
-
-### Timestamp Field
-
-The field which contains the required timestamp.
-
-### Buffer Size
-
-Indicates how many events are written into a buffer before they are written to the database.
-
-### Maximum Flush
-
-The maximum time (in ms) to wait for the buffer to fill before its content is written to the database.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.iotdb.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.iotdb.md
deleted file mode 100644
index f7c542dce..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.iotdb.md
+++ /dev/null
@@ -1,71 +0,0 @@
----
-id: org.apache.streampipes.sinks.databases.jvm.iotdb
-title: IoTDB
-sidebar_label: IoTDB
-original_id: org.apache.streampipes.sinks.databases.jvm.iotdb
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Stores events in an IoTDB database.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### Hostname
-
-The hostname of the IoTDB instance.
-
-### Port
-
-The port of the IoTDB instance (default 6667).
-
-### Storage Group Name
-
-The name of the storage group where events will be stored (will be created if it does not exist).
-For each element of the stream, a new time series will be created.
-
-### Username
-
-The username for the IoTDB Server.
-
-### Password
-
-The password for the IoTDB Server.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.mysql.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.mysql.md
deleted file mode 100644
index 28d208658..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.mysql.md
+++ /dev/null
@@ -1,75 +0,0 @@
----
-id: org.apache.streampipes.sinks.databases.jvm.mysql
-title: MySQL Database
-sidebar_label: MySQL Database
-original_id: org.apache.streampipes.sinks.databases.jvm.mysql
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Stores events in a MySQL database.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### Hostname
-
-The hostname of the MySQL Server.
-
-### Port
-
-The port of the MySQL Server (default: 3306).
-
-### Database
-
-The database of the MySQL Server the data is written to.
-
-### Table
-
-The table name of the MySQL Server the data is written to.
-
-### Username
-
-The username for the MySQL Server.
-
-### Password
-
-The password for the MySQL Server.
-
-## Output
-
-(not applicable for data sinks)
-
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.opcua.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.opcua.md
deleted file mode 100644
index 20320e06c..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.opcua.md
+++ /dev/null
@@ -1,73 +0,0 @@
----
-id: org.apache.streampipes.sinks.databases.jvm.opcua
-title: OPC-UA
-sidebar_label: OPC-UA
-original_id: org.apache.streampipes.sinks.databases.jvm.opcua
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Writes event values to an OPC-UA server.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### Hostname
-
-The hostname of the OPC-UA server.
-
-### Port
-
-The port of the OPC-UA server.
-
-### Namespace Index
-
-The namespace index in which the node should be written.
-
-### Node Id
-
-The node id of the resulting node.
-
-### Number Mapping
-
-The property of the event that should be written to the OPC-UA server.
-
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.postgresql.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.postgresql.md
deleted file mode 100644
index 56596e711..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.databases.jvm.postgresql.md
+++ /dev/null
@@ -1,74 +0,0 @@
----
-id: org.apache.streampipes.sinks.databases.jvm.postgresql
-title: PostgreSQL
-sidebar_label: PostgreSQL
-original_id: org.apache.streampipes.sinks.databases.jvm.postgresql
----
-
-
-
-

- -

- -*** - -## Description - -Stores events in a Postgres database. - -*** - -## Required input - -This sink does not have any requirements and works with any incoming event type. - -*** - -## Configuration - -### Hostname - -The hostname of the PostgreSQL instance. - -### Port - -The port of the PostgreSQL instance (default 5432). - -### Database Name - -The name of the database where events will be stored - -### Table Name - -The name of the table where events will be stored (will be created if it does not exist) - -### Username - -The username for the PostgreSQL Server. - -### Password - -The password for the PostgreSQL Server. - -## Output - -(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.dashboard.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.dashboard.md deleted file mode 100644 index 0ec51c1fc..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.dashboard.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -id: org.apache.streampipes.sinks.internal.jvm.dashboard -title: Dashboard Sink -sidebar_label: Dashboard Sink -original_id: org.apache.streampipes.sinks.internal.jvm.dashboard ---- - - - - - -

- -

-
-***
-
-## Description
-
-This sink visualizes data streams in the StreamPipes dashboard.
-Visualizations can be configured in the Live Dashboard of StreamPipes after the pipeline has been started.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-No further configuration necessary; individual visualizations can be configured in the Dashboard itself.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.datalake.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.datalake.md
deleted file mode 100644
index aeea4aabd..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.datalake.md
+++ /dev/null
@@ -1,86 +0,0 @@
----
-id: org.apache.streampipes.sinks.internal.jvm.datalake
-title: Data Lake
-sidebar_label: Data Lake
-original_id: org.apache.streampipes.sinks.internal.jvm.datalake
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Stores events in an InfluxDB.
-
-***
-
-## Required input
-
-This sink requires an event that provides a timestamp value (a field that is marked to be of type ``http://schema.org/DateTime``).
-
-***
-
-## Configuration
-
-### Hostname
-
-The hostname/URL of the InfluxDB instance (include http(s)://).
-
-### Port
-
-The port of the InfluxDB instance.
-
-### Database Name
-
-The name of the database where events will be stored.
-
-### Measurement Name
-
-The name of the measurement where events will be stored (will be created if it does not exist).
-
-### Username
-
-The username for the InfluxDB server.
-
-### Password
-
-The password for the InfluxDB server.
-
-### Timestamp Field
-
-The field which contains the required timestamp.
-
-### Buffer Size
-
-Indicates how many events are written into a buffer before they are written to the database.
-
-### Maximum Flush
-
-The maximum time (in ms) to wait for the buffer to fill before its content is written to the database.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.notification.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.notification.md
deleted file mode 100644
index e6fbe37ca..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.internal.jvm.notification.md
+++ /dev/null
@@ -1,58 +0,0 @@
----
-id: org.apache.streampipes.sinks.internal.jvm.notification
-title: Notification
-sidebar_label: Notification
-original_id: org.apache.streampipes.sinks.internal.jvm.notification
----
-
-
-
-

- -

- -*** - -## Description - -Displays a notification in the UI panel of StreamPipes. - -*** - -## Required input - -This sink does not have any requirements and works with any incoming event type. - -*** - -## Configuration - -### Notification Title - -The title of the notification. - -### Content - -The notification message. - -## Output - -(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.email.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.email.md deleted file mode 100644 index 0d32b6722..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.email.md +++ /dev/null @@ -1,68 +0,0 @@ ---- -id: org.apache.streampipes.sinks.notifications.jvm.email -title: Email Notification -sidebar_label: Email Notification -original_id: org.apache.streampipes.sinks.notifications.jvm.email ---- - - - - - -

- -

- -*** - -## Description - -This sink sends an email to a specified receiver. - -Before you use this sink, the settings of your email server need to be configured. -After you've installed the element, navigate to ``Settings``, open the panel ``Sinks Notifications JVM`` and add your - mail server and credentials. - -*** - -## Required input - -This sink does not have any requirements and works with any incoming event type. - -*** - -## Configuration - -The following configuration is required: - -### Receiver Address - -The email address of the receiver. - -### Subject - -The subject of the email. - -### Content - -The mail text. - -## Output - -(not applicable for data sinks) diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.onesignal.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.onesignal.md deleted file mode 100644 index 6b3a5b595..000000000 --- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.onesignal.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -id: org.apache.streampipes.sinks.notifications.jvm.onesignal -title: OneSignal -sidebar_label: OneSignal -original_id: org.apache.streampipes.sinks.notifications.jvm.onesignal ---- - - - - - -

- -

-
-***
-
-## Description
-
-This sink sends a push message to the OneSignal application.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### App Id
-
-The OneSignal application ID.
-
-### API Key
-
-The OneSignal API key.
-
-### Content
-
-The message that should be sent to OneSignal.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.slack.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.slack.md
deleted file mode 100644
index f0486b912..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.slack.md
+++ /dev/null
@@ -1,68 +0,0 @@
----
-id: org.apache.streampipes.sinks.notifications.jvm.slack
-title: Slack Notification
-sidebar_label: Slack Notification
-original_id: org.apache.streampipes.sinks.notifications.jvm.slack
----
-
-
-
-

- -

-
-***
-
-## Description
-
-Slack bot to send notifications directly into your Slack workspace.
-
-Before you use this sink, the Slack token needs to be configured.
-After you've installed the element, navigate to ``Settings``, open the panel ``Sinks Notifications JVM`` and add your
-Slack API token.
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### Receiver
-
-The receiver of the Slack message.
-
-### Channel Type
-
-The channel type; either "User" or "Channel".
-
-### Content
-
-The message that should be sent.
-
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.telegram.md b/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.telegram.md
deleted file mode 100644
index a4e28dd57..000000000
--- a/website-v2/versioned_docs/version-0.70.0/pe/org.apache.streampipes.sinks.notifications.jvm.telegram.md
+++ /dev/null
@@ -1,71 +0,0 @@
----
-id: org.apache.streampipes.sinks.notifications.jvm.telegram
-title: Telegram Publisher
-sidebar_label: Telegram Publisher
-original_id: org.apache.streampipes.sinks.notifications.jvm.telegram
----
-
-
-
-

- -
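-Conceptually, the publisher calls the Telegram Bot API's `sendMessage` method,
-roughly as in the following sketch (token, channel, and text are placeholders;
-this is not the sink's actual implementation):
-
-```java
-import java.net.URI;
-import java.net.URLEncoder;
-import java.net.http.HttpClient;
-import java.net.http.HttpRequest;
-import java.net.http.HttpResponse;
-import java.nio.charset.StandardCharsets;
-
-public class TelegramSendSketch {
-  public static void main(String[] args) throws Exception {
-    String token = "123456:ABC-DEF";  // Bot API key from @BotFather
-    String chat = "@channel_name";    // channel handle or chat_id
-    String text = URLEncoder.encode("Hello from StreamPipes", StandardCharsets.UTF_8);
-    HttpRequest request = HttpRequest.newBuilder()
-        .uri(URI.create("https://api.telegram.org/bot" + token
-            + "/sendMessage?chat_id=" + chat + "&text=" + text))
-        .build();
-    HttpResponse<String> response = HttpClient.newHttpClient()
-        .send(request, HttpResponse.BodyHandlers.ofString());
-    System.out.println(response.body());  // JSON response from the Bot API
-  }
-}
-```
-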

-
-***
-
-## Description
-
-Publisher to send notifications to a Telegram channel.
-
-In order to do so, you first have to:
-* Create a Telegram public [channel](https://telegram.org/tour/channels).
-> Private channels/groups: also supported.
-* Create a Telegram BOT via [@BotFather](https://core.telegram.org/bots#3-how-do-i-create-a-bot) and get an API key.
-* Set the bot as [administrator](https://www.wikihow.com/Make-Someone-an-Admin-on-Telegram) in your channel.
-
-***
-
-## Required input
-
-This sink does not have any requirements and works with any incoming event type.
-
-***
-
-## Configuration
-
-### Bot API Key
-
-The API Key generated by `@BotFather` when you created your bot.
-
-### Channel Name or Chat Id
-
-The handle name of your public channel (e.g. `@channel_name`).
-> For private channels/groups: the handle name is only available for public channels; use the `chat_id` instead.
-
-### Content
-
-The message to be sent.
-
-## Output
-
-(not applicable for data sinks)
diff --git a/website-v2/versioned_sidebars/version-0.70.0-sidebars.json b/website-v2/versioned_sidebars/version-0.70.0-sidebars.json
deleted file mode 100644
index 22f576d1c..000000000
--- a/website-v2/versioned_sidebars/version-0.70.0-sidebars.json
+++ /dev/null
@@ -1,213 +0,0 @@
-{
-  "documentation": {
-    "🚀 Try StreamPipes": [
-      "user-guide-introduction",
-      "try-installation",
-      "try-tutorial"
-    ],
-    "💡 Concepts": [
-      "concepts-overview"
-    ],
-    "🎓 Use StreamPipes": [
-      "use-connect",
-      "use-pipeline-editor",
-      "use-managing-pipelines",
-      "use-dashboard",
-      "use-data-explorer",
-      "use-notifications",
-      "use-install-pipeline-elements",
-      "use-configurations"
-    ],
-    "📚 Pipeline Elements": [
-      {
-        "type": "category",
-        "label": "Adapters",
-        "items": [
-          "pe/org.apache.streampipes.connect.protocol.stream.kafka",
-          "pe/org.apache.streampipes.connect.protocol.stream.pulsar",
-          "pe/org.apache.streampipes.connect.adapters.coindesk",
-          "pe/org.apache.streampipes.connect.protocol.stream.file",
-          "pe/org.apache.streampipes.protocol.set.file",
-          "pe/org.apache.streampipes.connect.adapters.flic.mqtt",
-          "pe/org.apache.streampipes.connect.adapters.gdelt",
-          "pe/org.apache.streampipes.connect.protocol.stream.httpserver",
-          "pe/org.apache.streampipes.protocol.set.http",
-          "pe/org.apache.streampipes.connect.protocol.stream.http",
-          "pe/org.apache.streampipes.connect.adapters.iex.news",
-          "pe/org.apache.streampipes.connect.adapters.iex.stocks",
-          "pe/org.apache.streampipes.connect.adapters.iss",
-          "pe/org.apache.streampipes.connect.adapters.image.set",
-          "pe/org.apache.streampipes.connect.adapters.image.stream",
-          "pe/org.apache.streampipes.connect.adapters.influxdb.set",
-          "pe/org.apache.streampipes.connect.adapters.influxdb.stream",
-          "pe/org.apache.streampipes.connect.protocol.stream.mqtt",
-          "pe/org.apache.streampipes.connect.adapters.simulator.machine",
-          "pe/org.apache.streampipes.connect.adapters.mysql.set",
-          "pe/org.apache.streampipes.connect.adapters.mysql.stream",
-          "pe/org.apache.streampipes.connect.adapters.netio.mqtt",
-          "pe/org.apache.streampipes.connect.adapters.netio.rest",
-          "pe/org.apache.streampipes.connect.adapters.nswaustralia.trafficcamera",
-          "pe/org.apache.streampipes.connect.adapters.opcua",
-          "pe/org.apache.streampipes.connect.adapters.plc4x.modbus",
-          "pe/org.apache.streampipes.connect.adapters.plc4x.s7",
-          "pe/org.apache.streampipes.connect.adapters.ros",
-          "pe/org.apache.streampipes.connect.adapters.simulator.randomdataset",
-
"pe/org.apache.streampipes.connect.adapters.simulator.randomdatastream", - "pe/org.apache.streampipes.connect.adapters.slack", - "pe/org.apache.streampipes.connect.adapters.ti", - "pe/org.apache.streampipes.connect.adapters.wikipedia.edit", - "pe/org.apache.streampipes.connect.adapters.wikipedia.new" - ] - }, - { - "type": "category", - "label": "Data Processors", - "items": [ - "pe/org.apache.streampipes.processors.pattern-detection.flink.absence", - "pe/org.apache.streampipes.processors.aggregation.flink.aggregation", - "pe/org.apache.streampipes.processors.pattern-detection.flink.and", - "pe/org.apache.streampipes.processors.transformation.flink.processor.boilerplate", - "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.counter", - "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.inverter", - "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timer", - "pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.state", - "pe/org.apache.streampipes.processors.transformation.jvm.csvmetadata", - "pe/org.apache.streampipes.processors.transformation.jvm.duration-value", - "pe/org.apache.streampipes.processors.textmining.jvm.chunker", - "pe/org.apache.streampipes.processors.filters.jvm.compose", - "pe/org.apache.streampipes.processors.aggregation.flink.count", - "pe/org.apache.streampipes.processors.transformation.jvm.count-array", - "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.latLngToGeo", - "pe/org.apache.streampipes.processors.changedetection.jvm.cusum", - "pe/org.apache.streampipes.processors.geo.jvm.processor.distancecalculator", - "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.setEPSG", - "pe/org.apache.streampipes.processors.aggregation.flink.eventcount", - "pe/org.apache.streampipes.processors.aggregation.flink.rate", - "pe/org.apache.streampipes.processors.transformation.flink.field-converter", - "pe/org.apache.streampipes.processors.transformation.flink.fieldhasher", - "pe/org.apache.streampipes.processors.transformation.flink.field-mapper", - "pe/org.apache.streampipes.processors.transformation.flink.field-renamer", - "pe/org.apache.streampipes.processors.siddhi.frequency", - "pe/org.apache.streampipes.processors.siddhi.frequencychange", - "pe/org.apache.streampipes.processor.imageclassification.jvm.generic-image-classification", - "pe/org.apache.streampipes.processor.geo.jvm.geocoding", - "pe/org.apache.streampipes.processor.imageclassification.jvm.image-cropper", - "pe/org.apache.streampipes.processor.imageclassification.jvm.image-enricher", - "pe/org.apache.streampipes.processors.textmining.flink.languagedetection", - "pe/org.apache.streampipes.processors.textmining.jvm.languagedetection", - "pe/org.apache.streampipes.processors.enricher.flink.processor.math.mathop", - "pe/org.apache.streampipes.processors.transformation.jvm.booloperator.timekeeping", - "pe/org.apache.streampipes.processors.transformation.flink.measurement-unit-converter", - "pe/org.apache.streampipes.processors.filters.jvm.enrich", - "pe/org.apache.streampipes.processors.textmining.jvm.namefinder", - "pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.number", - "pe/org.apache.streampipes.processors.filters.jvm.numericalfilter", - "pe/org.apache.streampipes.processors.siddhi.numericalfilter", - "pe/org.apache.streampipes.processors.filters.jvm.numericaltextfilter", - "pe/org.apache.streampipes.processors.textmining.jvm.partofspeech", - 
"pe/org.apache.streampipes.processors.pattern-detection.flink.peak-detection", - "pe/org.apache.streampipes.processors.filters.jvm.project", - "pe/org.apache.streampipes.processor.imageclassification.qrcode", - "pe/org.apache.streampipes.processors.filters.jvm.limit", - "pe/org.apache.streampipes.processor.geo.jvm.reversegeocoding", - "pe/org.apache.streampipes.processors.textmining.jvm.sentencedetection", - "pe/org.apache.streampipes.processors.pattern-detection.flink.sequence", - "pe/org.apache.streampipes.processors.siddhi.sequence", - "pe/org.apache.streampipes.processors.transformation.jvm.processor.booloperator.edge", - "pe/org.apache.streampipes.processors.geo.jvm.jts.processor.trajectory", - "pe/org.apache.streampipes.processors.enricher.jvm.sizemeasure", - "pe/org.apache.streampipes.processor.geo.flink", - "pe/org.apache.streampipes.processors.geo.jvm.processor.speed", - "pe/org.apache.streampipes.processors.transformation.jvm.split-array", - "pe/org.apache.streampipes.processors.transformation.jvm.processor.state.buffer", - "pe/org.apache.streampipes.processors.transformation.jvm.processor.state.labeler.buffer", - "pe/org.apache.streampipes.processors.geo.jvm.processor.staticdistancecalculator", - "pe/org.apache.streampipes.processor.geo.jvm.staticgeocoding", - "pe/org.apache.streampipes.processors.enricher.flink.processor.math.staticmathop", - "pe/org.apache.streampipes.processors.statistics.flink.statistics-summary", - "pe/org.apache.streampipes.processors.siddhi.stop", - "pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.counter", - "pe/org.apache.streampipes.processors.transformation.jvm.stringoperator.timer", - "pe/org.apache.streampipes.processors.transformation.jvm.processor.stringoperator.state", - "pe/org.apache.streampipes.processors.filters.jvm.merge", - "pe/org.apache.streampipes.processors.transformation.jvm.taskduration", - "pe/org.apache.streampipes.processors.filters.jvm.textfilter", - "pe/org.apache.streampipes.processors.filters.jvm.threshold", - "pe/org.apache.streampipes.processors.enricher.flink.timestamp", - "pe/org.apache.streampipes.processors.transformation.jvm.processor.timestampextractor", - "pe/org.apache.streampipes.processors.textmining.jvm.tokenizer", - "pe/org.apache.streampipes.processors.transformation.jvm.transform-to-boolean", - "pe/org.apache.streampipes.processors.siddhi.increase", - "pe/org.apache.streampipes.processors.enricher.flink.processor.trigonometry", - "pe/org.apache.streampipes.processors.enricher.flink.processor.urldereferencing", - "pe/org.apache.streampipes.processors.transformation.jvm.changed-value", - "pe/org.apache.streampipes.processors.textmining.flink.wordcount" - ] - }, - { - "type": "category", - "label": "Data Sinks", - "items": [ - "pe/org.apache.streampipes.sinks.brokers.jvm.bufferrest", - "pe/org.apache.streampipes.sinks.databases.jvm.couchdb", - "pe/org.apache.streampipes.sinks.internal.jvm.dashboard", - "pe/org.apache.streampipes.sinks.internal.jvm.datalake", - "pe/org.apache.streampipes.sinks.databases.ditto", - "pe/org.apache.streampipes.sinks.databases.flink.elasticsearch", - "pe/org.apache.streampipes.sinks.notifications.jvm.email", - "pe/org.apache.streampipes.sinks.databases.jvm.influxdb", - "pe/org.apache.streampipes.sinks.databases.jvm.iotdb", - "pe/org.apache.streampipes.sinks.brokers.jvm.jms", - "pe/org.apache.streampipes.sinks.brokers.jvm.kafka", - "pe/org.apache.streampipes.sinks.databases.jvm.mysql", - "pe/org.apache.streampipes.sinks.brokers.jvm.nats", - 
"pe/org.apache.streampipes.sinks.internal.jvm.notification", - "pe/org.apache.streampipes.sinks.databases.jvm.opcua", - "pe/org.apache.streampipes.sinks.notifications.jvm.onesignal", - "pe/org.apache.streampipes.sinks.databases.jvm.postgresql", - "pe/org.apache.streampipes.sinks.brokers.jvm.pulsar", - "pe/org.apache.streampipes.sinks.brokers.jvm.rest", - "pe/org.apache.streampipes.sinks.brokers.jvm.rabbitmq", - "pe/org.apache.streampipes.sinks.notifications.jvm.slack", - "pe/org.apache.streampipes.sinks.notifications.jvm.telegram" - ] - } - ], - "⚡ Deploy StreamPipes": [ - "deploy-docker", - "deploy-kubernetes", - "deploy-use-ssl", - "deploy-security" - ], - "💻 Extend StreamPipes": [ - "extend-setup", - "extend-cli", - "extend-archetypes", - "extend-first-processor", - "extend-tutorial-data-sources", - "extend-tutorial-data-processors", - "extend-tutorial-data-sinks", - "extend-sdk-event-model", - "extend-sdk-stream-requirements", - "extend-sdk-static-properties", - "extend-sdk-output-strategies", - "extend-sdk-migration-service-discovery" - ], - "🔧 Technicals": [ - "technicals-architecture", - "technicals-user-guidance", - "technicals-runtime-wrappers", - "technicals-messaging", - "technicals-configuration" - ], - "👪 Community": [ - "community-get-help", - "community-contribute" - ] - }, - "faq": { - "FAQ": [ - "faq-common-problems" - ] - } -} diff --git a/website-v2/versions.json b/website-v2/versions.json index e704e2796..2156012eb 100644 --- a/website-v2/versions.json +++ b/website-v2/versions.json @@ -4,6 +4,5 @@ "0.93.0", "0.92.0", "0.91.0", - "0.90.0", - "0.70.0" + "0.90.0" ]