diff --git a/.github/workflows/validate-generated-files.yml b/.github/workflows/validate-generated-files.yml index 9c0a35004..0787b4483 100644 --- a/.github/workflows/validate-generated-files.yml +++ b/.github/workflows/validate-generated-files.yml @@ -25,8 +25,6 @@ jobs: - name: Check generated files run: | export PATH=$PATH:$(go env GOPATH)/bin - make install-tools - make generate - make proto-generate + make install-tools generate proto-generate git diff git diff --exit-code --numstat diff --git a/.gitignore b/.gitignore index 07f5d9b45..63ebfdf26 100644 --- a/.gitignore +++ b/.gitignore @@ -75,6 +75,8 @@ dist/ # Standalone connectors /connectors +# Standalone processors +/processors /pipelines @@ -86,3 +88,6 @@ escape_analysis.txt # Profiles *.prof + +# Compiled test wasm processors +pkg/plugin/processor/standalone/test/wasm_processors/*/processor.wasm diff --git a/.golangci.yml b/.golangci.yml index 1cc408496..a23501695 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,7 +1,10 @@ run: timeout: 3m + skip-dirs-use-default: false skip-dirs: - - /ui/.* + - ^examples/ + - ^ui/ + - ^pkg/plugin/processor/builtin/internal/diff # external code linters-settings: depguard: @@ -40,6 +43,9 @@ linters-settings: issues: exclude-rules: + - path: 'pkg/plugin/processor/builtin/impl' + linters: + - goconst - path: '(.+)acceptance_testing\.go' linters: - stylecheck diff --git a/Makefile b/Makefile index c074092aa..cad4c3698 100644 --- a/Makefile +++ b/Makefile @@ -65,7 +65,7 @@ install-tools: download @go mod tidy generate: - go generate ./... + go generate -x ./... 
pkg/web/ui/dist: make ui-dist diff --git a/docs/connectors.md b/docs/connectors.md index 91ded276c..b74a167a2 100644 --- a/docs/connectors.md +++ b/docs/connectors.md @@ -46,7 +46,7 @@ Once you have chosen a connector to be built-in, you can: - Download the new package and its dependencies: `go get "github.com/foo/conduit-connector-new"` - Import the Go module defining the connector -into the [builtin registry](https://github.com/ConduitIO/conduit/blob/main/pkg/plugin/builtin/registry.go) +into the [builtin registry](https://github.com/ConduitIO/conduit/blob/main/pkg/plugin/connector/builtin/registry.go) and add a new key to `DefaultDispenserFactories`: ```diff diff --git a/go.mod b/go.mod index 661529586..8a64b82fa 100644 --- a/go.mod +++ b/go.mod @@ -1,15 +1,14 @@ module github.com/conduitio/conduit -go 1.21.1 +go 1.21.5 require ( buf.build/gen/go/grpc-ecosystem/grpc-gateway/protocolbuffers/go v1.32.0-20231027202514-3f42134f4c56.1 github.com/Masterminds/semver/v3 v3.2.1 github.com/Masterminds/sprig/v3 v3.2.3 github.com/NYTimes/gziphandler v1.1.1 - github.com/antchfx/jsonquery v1.3.3 github.com/bufbuild/buf v1.29.0 - github.com/conduitio/conduit-commons v0.0.0-20240112191423-58fcb3055cf2 + github.com/conduitio/conduit-commons v0.0.1 github.com/conduitio/conduit-connector-file v0.6.0 github.com/conduitio/conduit-connector-generator v0.5.0 github.com/conduitio/conduit-connector-kafka v0.7.1 @@ -18,10 +17,11 @@ require ( github.com/conduitio/conduit-connector-protocol v0.5.1-0.20240104160905-e9e61586fb8d github.com/conduitio/conduit-connector-s3 v0.5.1 github.com/conduitio/conduit-connector-sdk v0.8.0 + github.com/conduitio/conduit-processor-sdk v0.1.0 github.com/conduitio/yaml/v3 v3.3.0 github.com/dgraph-io/badger/v4 v4.2.0 - github.com/dop251/goja v0.0.0-20230531210528-d7324b2d74f7 - github.com/dop251/goja_nodejs v0.0.0-20230602164024-804a84515562 + github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d + github.com/dop251/goja_nodejs 
v0.0.0-20231122114759-e84d9a924c5c github.com/gammazero/deque v0.2.1 github.com/goccy/go-json v0.10.2 github.com/golangci/golangci-lint v1.56.2 @@ -43,6 +43,8 @@ require ( github.com/prometheus/client_model v0.6.0 github.com/prometheus/common v0.47.0 github.com/rs/zerolog v1.32.0 + github.com/stealthrocket/wazergo v0.19.1 + github.com/tetratelabs/wazero v1.6.0 github.com/twmb/go-cache v1.2.1 go.uber.org/goleak v1.3.0 go.uber.org/mock v0.4.0 @@ -79,7 +81,6 @@ require ( github.com/alexkohler/nakedret/v2 v2.0.2 // indirect github.com/alexkohler/prealloc v1.0.0 // indirect github.com/alingse/asasalint v0.0.11 // indirect - github.com/antchfx/xpath v1.2.3 // indirect github.com/antlr4-go/antlr/v4 v4.13.0 // indirect github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 // indirect github.com/apache/thrift v0.19.0 // indirect @@ -116,7 +117,6 @@ require ( github.com/butuzov/mirror v1.1.0 // indirect github.com/catenacyber/perfsprint v0.6.0 // indirect github.com/ccojocar/zxcvbn-go v1.0.2 // indirect - github.com/cenkalti/backoff/v4 v4.2.1 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/charithe/durationcheck v0.0.10 // indirect github.com/chavacava/garif v0.1.0 // indirect @@ -128,7 +128,7 @@ require ( github.com/denis-tingaikin/go-header v0.4.3 // indirect github.com/dgraph-io/ristretto v0.1.1 // indirect github.com/distribution/reference v0.5.0 // indirect - github.com/dlclark/regexp2 v1.8.0 // indirect + github.com/dlclark/regexp2 v1.10.0 // indirect github.com/docker/cli v24.0.7+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect github.com/docker/docker v25.0.0+incompatible // indirect @@ -214,7 +214,7 @@ require ( github.com/kisielk/errcheck v1.7.0 // indirect github.com/kisielk/gotool v1.0.0 // indirect github.com/kkHAIKE/contextcheck v1.1.4 // indirect - github.com/klauspost/compress v1.17.4 // indirect + github.com/klauspost/compress v1.17.5 // indirect github.com/klauspost/pgzip v1.2.6 // 
indirect github.com/kulti/thelper v0.6.3 // indirect github.com/kunwardeep/paralleltest v1.0.9 // indirect @@ -297,7 +297,6 @@ require ( github.com/t-yuki/gocover-cobertura v0.0.0-20180217150009-aaee18c8195c // indirect github.com/tdakkota/asciicheck v0.2.0 // indirect github.com/tetafro/godot v1.4.16 // indirect - github.com/tetratelabs/wazero v1.6.0 // indirect github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 // indirect github.com/timonwong/loggercheck v0.9.4 // indirect github.com/tomarrell/wrapcheck/v2 v2.8.1 // indirect @@ -319,12 +318,10 @@ require ( go-simpler.org/sloglint v0.4.0 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 // indirect - go.opentelemetry.io/otel v1.22.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 // indirect - go.opentelemetry.io/otel/metric v1.22.0 // indirect - go.opentelemetry.io/otel/sdk v1.22.0 // indirect - go.opentelemetry.io/otel/trace v1.22.0 // indirect - go.opentelemetry.io/proto/otlp v1.0.0 // indirect + go.opentelemetry.io/otel v1.24.0 // indirect + go.opentelemetry.io/otel/metric v1.24.0 // indirect + go.opentelemetry.io/otel/sdk v1.24.0 // indirect + go.opentelemetry.io/otel/trace v1.24.0 // indirect go.uber.org/atomic v1.11.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.26.0 // indirect diff --git a/go.sum b/go.sum index e9aa63149..8e47b3f27 100644 --- a/go.sum +++ b/go.sum @@ -927,10 +927,6 @@ github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cv github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw= github.com/alingse/asasalint v0.0.11/go.mod h1:nCaoMhw7a9kSJObvQyVzNTPBDbNpdocqrSP7t/cW5+I= github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/antchfx/jsonquery v1.3.3 h1:zjZpbnZhYng3uOAbIfdNq81A9mMEeuDJeYIpeKpZ4es= -github.com/antchfx/jsonquery v1.3.3/go.mod 
h1:1JG4DqRlRCHgVYDPY1ioYFAGSXGfWHzNgrbiGQHsWck= -github.com/antchfx/xpath v1.2.3 h1:CCZWOzv5bAqjVv0offZ2LVgVYFbeldKQVuLNbViZdes= -github.com/antchfx/xpath v1.2.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= @@ -1084,8 +1080,8 @@ github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWH github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/colinmarc/hdfs/v2 v2.1.1/go.mod h1:M3x+k8UKKmxtFu++uAZ0OtDU8jR3jnaZIAc6yK4Ue0c= -github.com/conduitio/conduit-commons v0.0.0-20240112191423-58fcb3055cf2 h1:KWVS2fHWB1NI3hdUg1xxzTB0TT7iNPUt35O8IoW1p5o= -github.com/conduitio/conduit-commons v0.0.0-20240112191423-58fcb3055cf2/go.mod h1:43mPxHKxsZbmqf1Gw+hpx3ByXAmp8doWTQNNfCoyXp8= +github.com/conduitio/conduit-commons v0.0.1 h1:UjY/dsfr88N0l8SdJ3hBOYq38qYmV6DEe5taxvdYjds= +github.com/conduitio/conduit-commons v0.0.1/go.mod h1:WjmUZpazjAolYz8SB++xvJlN47ro3AStDYhwRCK/yRc= github.com/conduitio/conduit-connector-file v0.6.0 h1:8tsGeGhKvFwYQZztOOL5/tmOhVShsfo9lQ3b/0fX8kQ= github.com/conduitio/conduit-connector-file v0.6.0/go.mod h1:ju7PiB4kTJgqng4KVXDt/Gvw/53kFwSzi5Ez9EDXxNI= github.com/conduitio/conduit-connector-generator v0.5.0 h1:zpXHif89DCJ13nftKLv31uI2AJGicpY5H1V7SwldRNo= @@ -1102,6 +1098,8 @@ github.com/conduitio/conduit-connector-s3 v0.5.1 h1:yRo8004ryCIZc/S3iWQ1rN6pm6bj github.com/conduitio/conduit-connector-s3 v0.5.1/go.mod h1:nbxzsyS95gbFJ28Job9vFFB+byRFINSv70/13Yi4mKQ= github.com/conduitio/conduit-connector-sdk v0.8.0 h1:gvchqoj5d3AQsBoIosx4i32L8Ex9+5BuAyHi/IM9VD4= github.com/conduitio/conduit-connector-sdk v0.8.0/go.mod 
h1:nOz4K3X6fD8YMe5CPbULwSEE18Eu02ZrpT6o6KwQfxs= +github.com/conduitio/conduit-processor-sdk v0.1.0 h1:DpCDFZGd9skoAEizZ+5B58X4oyPO0saTsoka+N/Awek= +github.com/conduitio/conduit-processor-sdk v0.1.0/go.mod h1:9sjasEukN9HAXj1xZ6kLNzz8mpn2TGrbD9mmPYpYNv8= github.com/conduitio/yaml/v3 v3.3.0 h1:kbbaOSHcuH39gP4+rgbJGl6DSbLZcJgEaBvkEXJlCsI= github.com/conduitio/yaml/v3 v3.3.0/go.mod h1:JNgFMOX1t8W4YJuRZOh6GggVtSMsgP9XgTw+7dIenpc= github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= @@ -1141,8 +1139,8 @@ github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.7.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= -github.com/dlclark/regexp2 v1.8.0 h1:rJD5HeGIT/2b5CDk63FVCwZA3qgYElfg+oQK7uH5pfE= -github.com/dlclark/regexp2 v1.8.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= +github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0= +github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko= github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= github.com/docker/cli v24.0.7+incompatible h1:wa/nIwYFW7BVTGa7SWPVyyXU9lgORqUb1xfI36MSkFg= @@ -1159,12 +1157,12 @@ github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4 github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= github.com/dop251/goja v0.0.0-20211022113120-dc8c55024d06/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk= -github.com/dop251/goja 
v0.0.0-20230531210528-d7324b2d74f7 h1:cVGkvrdHgyBkYeB6kMCaF5j2d9Bg4trgbIpcUrKrvk4= -github.com/dop251/goja v0.0.0-20230531210528-d7324b2d74f7/go.mod h1:QMWlm50DNe14hD7t24KEqZuUdC9sOTy8W6XbCU1mlw4= +github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d h1:wi6jN5LVt/ljaBG4ue79Ekzb12QfJ52L9Q98tl8SWhw= +github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d/go.mod h1:QMWlm50DNe14hD7t24KEqZuUdC9sOTy8W6XbCU1mlw4= github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y= github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= -github.com/dop251/goja_nodejs v0.0.0-20230602164024-804a84515562 h1:0gomDSJiLLlpfKxQAHt5zj+9toIcyLMPgkI/Mgv7FAU= -github.com/dop251/goja_nodejs v0.0.0-20230602164024-804a84515562/go.mod h1:X2TOTJ+Uamd454RFp7ig2tmP3hQg0Z2Qk8gbVQmU0mk= +github.com/dop251/goja_nodejs v0.0.0-20231122114759-e84d9a924c5c h1:hLoodLRD4KLWIH8eyAQCLcH8EqIrjac7fCkp/fHnvuQ= +github.com/dop251/goja_nodejs v0.0.0-20231122114759-e84d9a924c5c/go.mod h1:bhGPmCgCCTSRfiMYWjpS46IDo9EUZXlsuUaPXSWGbv0= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= @@ -1640,8 +1638,8 @@ github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYs github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= github.com/klauspost/compress v1.15.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk= github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU= -github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= -github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/klauspost/compress v1.17.5 
h1:d4vBd+7CHydUqpFBgUEKkSdtSugf9YFmSkvUYPquI5E= +github.com/klauspost/compress v1.17.5/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= @@ -1966,6 +1964,8 @@ github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YE github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= github.com/stbenjam/no-sprintf-host-port v0.1.1 h1:tYugd/yrm1O0dV+ThCbaKZh195Dfm07ysF0U6JQXczc= github.com/stbenjam/no-sprintf-host-port v0.1.1/go.mod h1:TLhvtIvONRzdmkFiio4O8LHsN9N74I+PhRquPsxpL0I= +github.com/stealthrocket/wazergo v0.19.1 h1:BPrITETPgSFwiytwmToO0MbUC/+RGC39JScz1JmmG6c= +github.com/stealthrocket/wazergo v0.19.1/go.mod h1:riI0hxw4ndZA5e6z7PesHg2BtTftcZaMxRcoiGGipTs= github.com/stoewer/go-strcase v1.3.0 h1:g0eASXYtp+yvN9fK8sH94oCIk0fau9uV1/ZdJ0AVEzs= github.com/stoewer/go-strcase v1.3.0/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -2076,25 +2076,25 @@ go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0 h1:sv9kVfal0MK0wBMCOGr+HeJm9v803BkJxGrk2au7j08= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0/go.mod h1:SK2UL73Zy1quvRPonmOmRDiWk1KBV3LyIeeIxcEApWw= -go.opentelemetry.io/otel v1.22.0 h1:xS7Ku+7yTFvDfDraDIJVpw7XPyuHlB9MCiqqX5mcJ6Y= -go.opentelemetry.io/otel v1.22.0/go.mod h1:eoV4iAi3Ea8LkAEI9+GFT44O6T/D0GWAVFyZVCC6pMI= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 h1:9M3+rhx7kZCIQQhQRYaZCdNu1V73tm4TvXs2ntl98C4= 
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0/go.mod h1:noq80iT8rrHP1SfybmPiRGc9dc5M8RPmGvtwo7Oo7tc= +go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo= +go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.24.0 h1:t6wl9SPayj+c7lEIFgm4ooDBZVb01IhLB4InpomhRw8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.24.0/go.mod h1:iSDOcsnSA5INXzZtwaBPrKp/lWu/V14Dd+llD0oI2EA= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.22.0 h1:FyjCyI9jVEfqhUh2MoSkmolPjfh5fp2hnV0b0irxH4Q= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.22.0/go.mod h1:hYwym2nDEeZfG/motx0p7L7J1N1vyzIThemQsb4g2qY= -go.opentelemetry.io/otel/metric v1.22.0 h1:lypMQnGyJYeuYPhOM/bgjbFM6WE44W1/T45er4d8Hhg= -go.opentelemetry.io/otel/metric v1.22.0/go.mod h1:evJGjVpZv0mQ5QBRJoBF64yMuOf4xCWdXjK8pzFvliY= -go.opentelemetry.io/otel/sdk v1.22.0 h1:6coWHw9xw7EfClIC/+O31R8IY3/+EiRFHevmHafB2Gw= -go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc= +go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI= +go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco= +go.opentelemetry.io/otel/sdk v1.24.0 h1:YMPPDNymmQN3ZgczicBY3B6sf9n62Dlj9pWD3ucgoDw= +go.opentelemetry.io/otel/sdk v1.24.0/go.mod h1:KVrIYw6tEubO9E96HQpcmpTKDVn9gdv35HoYiQWGDFg= go.opentelemetry.io/otel/sdk/metric v1.19.0 h1:EJoTO5qysMsYCa+w4UghwFV/ptQgqSL/8Ni+hx+8i1k= go.opentelemetry.io/otel/sdk/metric v1.19.0/go.mod h1:XjG0jQyFJrv2PbMvwND7LwCEhsJzCzV5210euduKcKY= -go.opentelemetry.io/otel/trace v1.22.0 h1:Hg6pPujv0XG9QaVbGOBVHunyuLcCC3jN7WEhPx83XD0= -go.opentelemetry.io/otel/trace v1.22.0/go.mod h1:RbbHXVqKES9QhzZq/fE5UnOSILqRt40a21sPw2He1xo= +go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI= +go.opentelemetry.io/otel/trace v1.24.0/go.mod 
h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U= -go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I= -go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM= +go.opentelemetry.io/proto/otlp v1.1.0 h1:2Di21piLrCqJ3U3eXGCTPHE9R8Nh+0uglSnOyxikMeI= +go.opentelemetry.io/proto/otlp v1.1.0/go.mod h1:GpBHCBWiqvVLDqmHZsoMM3C5ySeKTC7ej/RNTae6MdY= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= diff --git a/pkg/conduit/config.go b/pkg/conduit/config.go index e2b92b808..feb8599e9 100644 --- a/pkg/conduit/config.go +++ b/pkg/conduit/config.go @@ -20,8 +20,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/database" "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/plugin/builtin" - "github.com/conduitio/conduit/pkg/processor" + "github.com/conduitio/conduit/pkg/plugin/connector/builtin" "github.com/rs/zerolog" ) @@ -68,13 +67,16 @@ type Config struct { Path string } + Processors struct { + Path string + } + Pipelines struct { Path string ExitOnError bool } PluginDispenserFactories map[string]builtin.DispenserFactory - ProcessorBuilderRegistry *processor.BuilderRegistry dev struct { cpuprofile string @@ -94,10 +96,10 @@ func DefaultConfig() Config { cfg.Log.Level = "info" cfg.Log.Format = "cli" cfg.Connectors.Path = "./connectors" + cfg.Processors.Path = "./processors" cfg.Pipelines.Path = "./pipelines" cfg.PluginDispenserFactories = 
builtin.DefaultDispenserFactories - cfg.ProcessorBuilderRegistry = processor.GlobalBuilderRegistry return cfg } diff --git a/pkg/conduit/entrypoint.go b/pkg/conduit/entrypoint.go index 11167b444..ab3649962 100644 --- a/pkg/conduit/entrypoint.go +++ b/pkg/conduit/entrypoint.go @@ -89,7 +89,8 @@ func (*Entrypoint) Flags(cfg *Config) *flag.FlagSet { flags.StringVar(&cfg.Log.Level, "log.level", cfg.Log.Level, "sets logging level; accepts debug, info, warn, error, trace") flags.StringVar(&cfg.Log.Format, "log.format", cfg.Log.Format, "sets the format of the logging; accepts json, cli") - flags.StringVar(&cfg.Connectors.Path, "connectors.path", cfg.Connectors.Path, "path to standalone connectors directory") + flags.StringVar(&cfg.Connectors.Path, "connectors.path", cfg.Connectors.Path, "path to standalone connectors' directory") + flags.StringVar(&cfg.Processors.Path, "processors.path", cfg.Processors.Path, "path to standalone processors' directory") flags.StringVar(&cfg.Pipelines.Path, "pipelines.path", cfg.Pipelines.Path, "path to the directory that has the yaml pipeline configuration files, or a single pipeline configuration file") flags.BoolVar(&cfg.Pipelines.ExitOnError, "pipelines.exit-on-error", cfg.Pipelines.ExitOnError, "exit Conduit if a pipeline experiences an error while running") diff --git a/pkg/conduit/runtime.go b/pkg/conduit/runtime.go index 1654f11d5..759c00ec3 100644 --- a/pkg/conduit/runtime.go +++ b/pkg/conduit/runtime.go @@ -43,9 +43,12 @@ import ( "github.com/conduitio/conduit/pkg/foundation/multierror" "github.com/conduitio/conduit/pkg/orchestrator" "github.com/conduitio/conduit/pkg/pipeline" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/builtin" - "github.com/conduitio/conduit/pkg/plugin/standalone" + conn_plugin "github.com/conduitio/conduit/pkg/plugin/connector" + conn_builtin "github.com/conduitio/conduit/pkg/plugin/connector/builtin" + conn_standalone 
"github.com/conduitio/conduit/pkg/plugin/connector/standalone" + proc_plugin "github.com/conduitio/conduit/pkg/plugin/processor" + proc_builtin "github.com/conduitio/conduit/pkg/plugin/processor/builtin" + proc_standalone "github.com/conduitio/conduit/pkg/plugin/processor/standalone" "github.com/conduitio/conduit/pkg/processor" "github.com/conduitio/conduit/pkg/provisioning" "github.com/conduitio/conduit/pkg/web/api" @@ -63,10 +66,6 @@ import ( "google.golang.org/grpc/reflection" "google.golang.org/grpc/stats" "gopkg.in/tomb.v2" - - // NB: anonymous import triggers processor registry creation - _ "github.com/conduitio/conduit/pkg/processor/procbuiltin" - _ "github.com/conduitio/conduit/pkg/processor/procjs" ) const ( @@ -87,7 +86,8 @@ type Runtime struct { connectorService *connector.Service processorService *processor.Service - pluginService *plugin.Service + connectorPluginService *conn_plugin.PluginService + processorPluginService *proc_plugin.PluginService connectorPersister *connector.Persister logger log.CtxLogger @@ -132,14 +132,14 @@ func NewRuntime(cfg Config) (*Runtime, error) { ) // Create all necessary internal services - plService, connService, procService, pluginService, err := newServices(logger, db, connectorPersister, cfg) + plService, connService, procService, connPluginService, procPluginService, err := newServices(logger, db, connectorPersister, cfg) if err != nil { return nil, cerrors.Errorf("failed to create services: %w", err) } - provisionService := provisioning.NewService(db, logger, plService, connService, procService, pluginService, cfg.Pipelines.Path) + provisionService := provisioning.NewService(db, logger, plService, connService, procService, connPluginService, cfg.Pipelines.Path) - orc := orchestrator.NewOrchestrator(db, logger, plService, connService, procService, pluginService) + orc := orchestrator.NewOrchestrator(db, logger, plService, connService, procService, connPluginService, procPluginService) r := &Runtime{ Config: cfg, @@ 
-151,7 +151,9 @@ func NewRuntime(cfg Config) (*Runtime, error) { pipelineService: plService, connectorService: connService, processorService: procService, - pluginService: pluginService, + + connectorPluginService: connPluginService, + processorPluginService: procPluginService, connectorPersister: connectorPersister, @@ -184,17 +186,29 @@ func newServices( db database.DB, connPersister *connector.Persister, cfg Config, -) (*pipeline.Service, *connector.Service, *processor.Service, *plugin.Service, error) { - pipelineService := pipeline.NewService(logger, db) - connectorService := connector.NewService(logger, db, connPersister) - processorService := processor.NewService(logger, db, cfg.ProcessorBuilderRegistry) - pluginService := plugin.NewService( +) (*pipeline.Service, *connector.Service, *processor.Service, *conn_plugin.PluginService, *proc_plugin.PluginService, error) { + standaloneReg, err := proc_standalone.NewRegistry(logger, cfg.Processors.Path) + if err != nil { + return nil, nil, nil, nil, nil, cerrors.Errorf("failed creating processor registry: %w", err) + } + + procPluginService := proc_plugin.NewPluginService( logger, - builtin.NewRegistry(logger, cfg.PluginDispenserFactories), - standalone.NewRegistry(logger, cfg.Connectors.Path), + proc_builtin.NewRegistry(logger, proc_builtin.DefaultBuiltinProcessors), + standaloneReg, ) - return pipelineService, connectorService, processorService, pluginService, nil + connPluginService := conn_plugin.NewPluginService( + logger, + conn_builtin.NewRegistry(logger, cfg.PluginDispenserFactories), + conn_standalone.NewRegistry(logger, cfg.Connectors.Path), + ) + + pipelineService := pipeline.NewService(logger, db) + connectorService := connector.NewService(logger, db, connPersister) + processorService := processor.NewService(logger, db, procPluginService) + + return pipelineService, connectorService, processorService, connPluginService, procPluginService, nil } // Run initializes all of Conduit's underlying services and 
starts the GRPC and @@ -262,7 +276,7 @@ func (r *Runtime) Run(ctx context.Context) (err error) { } } - err = r.pipelineService.Run(ctx, r.connectorService, r.processorService, r.pluginService) + err = r.pipelineService.Run(ctx, r.connectorService, r.processorService, r.connectorPluginService) if err != nil { multierror.ForEach(err, func(err error) { r.logger.Err(ctx, err).Msg("pipeline failed to be started") @@ -417,13 +431,13 @@ func (r *Runtime) serveGRPCAPI(ctx context.Context, t *tomb.Tomb) (net.Addr, err pipelineAPIv1 := api.NewPipelineAPIv1(r.Orchestrator.Pipelines) pipelineAPIv1.Register(grpcServer) - processorAPIv1 := api.NewProcessorAPIv1(r.Orchestrator.Processors) + processorAPIv1 := api.NewProcessorAPIv1(r.Orchestrator.Processors, r.Orchestrator.ProcessorPlugins) processorAPIv1.Register(grpcServer) - connectorAPIv1 := api.NewConnectorAPIv1(r.Orchestrator.Connectors) + connectorAPIv1 := api.NewConnectorAPIv1(r.Orchestrator.Connectors, r.Orchestrator.ConnectorPlugins) connectorAPIv1.Register(grpcServer) - pluginAPIv1 := api.NewPluginAPIv1(r.Orchestrator.Plugins) + pluginAPIv1 := api.NewPluginAPIv1(r.Orchestrator.ConnectorPlugins) pluginAPIv1.Register(grpcServer) info := api.NewInformation(Version(false)) @@ -436,10 +450,11 @@ func (r *Runtime) serveGRPCAPI(ctx context.Context, t *tomb.Tomb) (net.Addr, err // Names taken from api.proto healthServer := api.NewHealthServer( map[string]api.Checker{ - "PipelineService": r.pipelineService, - "ConnectorService": r.connectorService, - "ProcessorService": r.processorService, - "PluginService": r.pluginService, + "PipelineService": r.pipelineService, + "ConnectorService": r.connectorService, + "ProcessorService": r.processorService, + "ConnectorPluginService": r.connectorPluginService, + "ProcessorPluginService": r.processorPluginService, }, r.logger, ) diff --git a/pkg/connector/destination.go b/pkg/connector/destination.go index 6d75fee23..e1bdce52d 100644 --- a/pkg/connector/destination.go +++ 
b/pkg/connector/destination.go @@ -21,14 +21,15 @@ import ( "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/record" ) type Destination struct { Instance *Instance - dispenser plugin.Dispenser - plugin plugin.DestinationPlugin + dispenser connectorPlugin.Dispenser + plugin connectorPlugin.DestinationPlugin // errs is used to signal the node that the connector experienced an error // when it was processing something asynchronously (e.g. persisting state). @@ -37,7 +38,7 @@ type Destination struct { // stopStream is a function that closes the context of the stream stopStream context.CancelFunc - // wg tracks the number of in flight calls to the plugin. + // wg tracks the number of in flight calls to the connectorPlugin. wg sync.WaitGroup } diff --git a/pkg/connector/destination_test.go b/pkg/connector/destination_test.go index b4f1a5755..4343b811b 100644 --- a/pkg/connector/destination_test.go +++ b/pkg/connector/destination_test.go @@ -22,7 +22,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/database/inmemory" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/mock" + "github.com/conduitio/conduit/pkg/plugin/connector/mock" "github.com/matryer/is" "go.uber.org/mock/gomock" ) diff --git a/pkg/connector/instance.go b/pkg/connector/instance.go index 2dfa7e334..656e4442b 100644 --- a/pkg/connector/instance.go +++ b/pkg/connector/instance.go @@ -24,7 +24,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/inspector" - "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" ) const ( @@ -86,7 +86,7 @@ type Connector interface { 
// PluginDispenserFetcher can fetch a plugin dispenser. type PluginDispenserFetcher interface { - NewDispenser(logger log.CtxLogger, name string) (plugin.Dispenser, error) + NewDispenser(logger log.CtxLogger, name string) (connectorPlugin.Dispenser, error) } func (i *Instance) Init(logger log.CtxLogger, persister *Persister) { diff --git a/pkg/connector/instance_test.go b/pkg/connector/instance_test.go index 16852543c..aa755daae 100644 --- a/pkg/connector/instance_test.go +++ b/pkg/connector/instance_test.go @@ -17,12 +17,13 @@ package connector import ( "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" ) // fakePluginFetcher fulfills the PluginFetcher interface. -type fakePluginFetcher map[string]plugin.Dispenser +type fakePluginFetcher map[string]connectorPlugin.Dispenser -func (fpf fakePluginFetcher) NewDispenser(_ log.CtxLogger, name string) (plugin.Dispenser, error) { +func (fpf fakePluginFetcher) NewDispenser(_ log.CtxLogger, name string) (connectorPlugin.Dispenser, error) { plug, ok := fpf[name] if !ok { return nil, plugin.ErrPluginNotFound diff --git a/pkg/connector/service_test.go b/pkg/connector/service_test.go index 3877d8fb7..57696b4ab 100644 --- a/pkg/connector/service_test.go +++ b/pkg/connector/service_test.go @@ -25,7 +25,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/database/inmemory" "github.com/conduitio/conduit/pkg/foundation/database/mock" "github.com/conduitio/conduit/pkg/foundation/log" - pmock "github.com/conduitio/conduit/pkg/plugin/mock" + pmock "github.com/conduitio/conduit/pkg/plugin/connector/mock" "github.com/conduitio/conduit/pkg/record" "github.com/google/uuid" "github.com/matryer/is" diff --git a/pkg/connector/source.go b/pkg/connector/source.go index 077571523..44119709e 100644 --- a/pkg/connector/source.go +++ b/pkg/connector/source.go @@ -21,14 +21,15 @@ import ( 
"github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/record" ) type Source struct { Instance *Instance - dispenser plugin.Dispenser - plugin plugin.SourcePlugin + dispenser connectorPlugin.Dispenser + plugin connectorPlugin.SourcePlugin // errs is used to signal the node that the connector experienced an error // when it was processing something asynchronously (e.g. persisting state). @@ -37,7 +38,7 @@ type Source struct { // stopStream is a function that closes the context of the stream stopStream context.CancelFunc - // wg tracks the number of in flight calls to the plugin. + // wg tracks the number of in flight calls to the connectorPlugin. wg sync.WaitGroup } diff --git a/pkg/connector/source_test.go b/pkg/connector/source_test.go index cea8c3bac..18d2fce6e 100644 --- a/pkg/connector/source_test.go +++ b/pkg/connector/source_test.go @@ -25,7 +25,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/database/inmemory" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/mock" + "github.com/conduitio/conduit/pkg/plugin/connector/mock" "github.com/conduitio/conduit/pkg/record" "github.com/matryer/is" "go.uber.org/mock/gomock" diff --git a/pkg/foundation/cerrors/cerrors.go b/pkg/foundation/cerrors/cerrors.go index 1f935060d..f24f84832 100644 --- a/pkg/foundation/cerrors/cerrors.go +++ b/pkg/foundation/cerrors/cerrors.go @@ -37,6 +37,7 @@ var ( Is = errors.Is As = errors.As Unwrap = errors.Unwrap + Join = errors.Join ) type Frame struct { diff --git a/pkg/foundation/log/ctxlogger.go b/pkg/foundation/log/ctxlogger.go index 8ba0c28a5..edf5d344a 100644 --- a/pkg/foundation/log/ctxlogger.go +++ b/pkg/foundation/log/ctxlogger.go @@ -16,6 +16,9 @@ package log import ( "context" + "reflect" + 
"strings" + "testing" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/rs/zerolog" @@ -46,6 +49,11 @@ func Nop() CtxLogger { return CtxLogger{Logger: zerolog.Nop()} } +// Test returns a test logger that writes to the supplied testing.TB. +func Test(t testing.TB) CtxLogger { + return CtxLogger{Logger: zerolog.New(zerolog.NewTestWriter(t))} +} + // InitLogger returns a logger initialized with the wanted level and format func InitLogger(level zerolog.Level, f Format) CtxLogger { var w = GetWriter(f) @@ -67,6 +75,20 @@ func (l CtxLogger) WithComponent(component string) CtxLogger { return l } +func (l CtxLogger) WithComponentFromType(c any) CtxLogger { + cType := reflect.TypeOf(c) + for cType.Kind() == reflect.Ptr || cType.Kind() == reflect.Interface { + cType = cType.Elem() + } + + pkgPath := cType.PkgPath() + pkgPath = strings.TrimPrefix(pkgPath, "github.com/conduitio/conduit/pkg/") + pkgPath = strings.ReplaceAll(pkgPath, "/", ".") + typeName := cType.Name() + l.component = pkgPath + "." 
+ typeName + return l +} + func (l CtxLogger) Component() string { return l.component } diff --git a/pkg/foundation/log/ctxlogger_test.go b/pkg/foundation/log/ctxlogger_test.go index 5e4edb80e..2054b93ae 100644 --- a/pkg/foundation/log/ctxlogger_test.go +++ b/pkg/foundation/log/ctxlogger_test.go @@ -41,6 +41,20 @@ func TestCtxLoggerComponent(t *testing.T) { is.Equal(`{"level":"info","component":"test","message":"testing component"}`+"\n", got) } +type testComponent struct{} + +func TestCtxLoggerComponentFromType(t *testing.T) { + is := is.New(t) + + logger := New(zerolog.New(zerolog.NewTestWriter(t))) + + logger = logger.WithComponentFromType(testComponent{}) + is.Equal("foundation.log.testComponent", logger.Component()) + + logger = logger.WithComponentFromType(&testComponent{}) + is.Equal("foundation.log.testComponent", logger.Component()) +} + func TestCtxLoggerWithoutHooks(t *testing.T) { ctx := context.Background() diff --git a/pkg/foundation/log/fields.go b/pkg/foundation/log/fields.go index b79d0f517..dd0bbc61f 100644 --- a/pkg/foundation/log/fields.go +++ b/pkg/foundation/log/fields.go @@ -22,6 +22,7 @@ const ( NodeIDField = "node_id" ParallelWorkerIDField = "parallel_worker_id" PipelineIDField = "pipeline_id" + ProcessorIDField = "processor_id" RecordPositionField = "record_position" RequestIDField = "request_id" ServerAddressField = "address" diff --git a/pkg/orchestrator/plugins.go b/pkg/orchestrator/connector_plugins.go similarity index 72% rename from pkg/orchestrator/plugins.go rename to pkg/orchestrator/connector_plugins.go index 65a775f73..4db581027 100644 --- a/pkg/orchestrator/plugins.go +++ b/pkg/orchestrator/connector_plugins.go @@ -17,11 +17,11 @@ package orchestrator import ( "context" - "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" ) -type PluginOrchestrator base +type ConnectorPluginOrchestrator base -func (ps *PluginOrchestrator) List(ctx context.Context) (map[string]plugin.Specification, 
error) { - return ps.plugins.List(ctx) +func (ps *ConnectorPluginOrchestrator) List(ctx context.Context) (map[string]connector.Specification, error) { + return ps.connectorPlugins.List(ctx) } diff --git a/pkg/orchestrator/connectors.go b/pkg/orchestrator/connectors.go index 539044703..6076a8863 100644 --- a/pkg/orchestrator/connectors.go +++ b/pkg/orchestrator/connectors.go @@ -74,7 +74,7 @@ func (c *ConnectorOrchestrator) Create( if err != nil { return nil, err } - r.Append(func() error { return c.connectors.Delete(ctx, conn.ID, c.plugins) }) + r.Append(func() error { return c.connectors.Delete(ctx, conn.ID, c.connectorPlugins) }) _, err = c.pipelines.AddConnector(ctx, pl.ID, conn.ID) if err != nil { @@ -127,7 +127,7 @@ func (c *ConnectorOrchestrator) Delete(ctx context.Context, id string) error { if pl.Status == pipeline.StatusRunning { return pipeline.ErrPipelineRunning } - err = c.connectors.Delete(ctx, id, c.plugins) + err = c.connectors.Delete(ctx, id, c.connectorPlugins) if err != nil { return err } @@ -203,16 +203,12 @@ func (c *ConnectorOrchestrator) Validate( plugin string, config connector.Config, ) error { - d, err := c.plugins.NewDispenser(c.logger, plugin) - if err != nil { - return cerrors.Errorf("couldn't get dispenser: %w", err) - } - + var err error switch t { case connector.TypeSource: - err = c.plugins.ValidateSourceConfig(ctx, d, config.Settings) + err = c.connectorPlugins.ValidateSourceConfig(ctx, plugin, config.Settings) case connector.TypeDestination: - err = c.plugins.ValidateDestinationConfig(ctx, d, config.Settings) + err = c.connectorPlugins.ValidateDestinationConfig(ctx, plugin, config.Settings) default: return cerrors.New("invalid connector type") } diff --git a/pkg/orchestrator/connectors_test.go b/pkg/orchestrator/connectors_test.go index 5cb81087c..04f75870b 100644 --- a/pkg/orchestrator/connectors_test.go +++ b/pkg/orchestrator/connectors_test.go @@ -24,7 +24,6 @@ import ( 
"github.com/conduitio/conduit/pkg/foundation/database/inmemory" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/pipeline" - pmock "github.com/conduitio/conduit/pkg/plugin/mock" "github.com/google/uuid" "github.com/matryer/is" "go.uber.org/mock/gomock" @@ -34,8 +33,7 @@ func TestConnectorOrchestrator_Create_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -57,18 +55,13 @@ func TestConnectorOrchestrator_Create_Success(t *testing.T) { UpdatedAt: time.Now().UTC(), } - pluginDispenser := pmock.NewDispenser(ctrl) - plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) - pluginMock.EXPECT(). - NewDispenser(gomock.Any(), want.Plugin). - Return(pluginDispenser, nil) - pluginMock.EXPECT(). + connPluginMock.EXPECT(). ValidateSourceConfig( gomock.AssignableToTypeOf(ctxType), - pluginDispenser, + want.Plugin, want.Config.Settings, ).Return(nil) consMock.EXPECT(). @@ -85,7 +78,7 @@ func TestConnectorOrchestrator_Create_Success(t *testing.T) { AddConnector(gomock.AssignableToTypeOf(ctxType), pl.ID, want.ID). 
Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Create(ctx, want.Type, want.Plugin, want.PipelineID, want.Config) is.NoErr(err) is.Equal(want, got) @@ -95,7 +88,7 @@ func TestConnectorOrchestrator_Create_PipelineNotExist(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pipelineID := uuid.NewString() wantErr := pipeline.ErrInstanceNotFound @@ -103,7 +96,7 @@ func TestConnectorOrchestrator_Create_PipelineNotExist(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pipelineID). Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Create(ctx, connector.TypeSource, "test-plugin", pipelineID, connector.Config{}) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) @@ -114,7 +107,7 @@ func TestConnectorOrchestrator_Create_PipelineRunning(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -125,7 +118,7 @@ func TestConnectorOrchestrator_Create_PipelineRunning(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). 
Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Create(ctx, connector.TypeSource, "test-plugin", pl.ID, connector.Config{}) is.True(err != nil) is.True(cerrors.Is(err, pipeline.ErrPipelineRunning)) @@ -136,7 +129,7 @@ func TestConnectorOrchestrator_Create_PipelineProvisionByConfig(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -148,7 +141,7 @@ func TestConnectorOrchestrator_Create_PipelineProvisionByConfig(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Create(ctx, connector.TypeSource, "test-plugin", pl.ID, connector.Config{}) is.Equal(got, nil) is.True(err != nil) @@ -159,10 +152,7 @@ func TestConnectorOrchestrator_Create_CreateConnectorError(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - plsMock, consMock, procsMock, pluginMock := newMockServices(t) - - pluginDispenser := pmock.NewDispenser(ctrl) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -173,13 +163,10 @@ func TestConnectorOrchestrator_Create_CreateConnectorError(t *testing.T) { plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) - pluginMock.EXPECT(). - NewDispenser(gomock.Any(), "test-plugin"). - Return(pluginDispenser, nil) - pluginMock.EXPECT(). + connPluginMock.EXPECT(). 
ValidateSourceConfig( gomock.AssignableToTypeOf(ctxType), - pluginDispenser, + "test-plugin", config.Settings, ).Return(nil) consMock.EXPECT(). @@ -194,7 +181,7 @@ func TestConnectorOrchestrator_Create_CreateConnectorError(t *testing.T) { ). Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Create(ctx, connector.TypeSource, "test-plugin", pl.ID, config) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) @@ -205,10 +192,7 @@ func TestConnectorOrchestrator_Create_AddConnectorError(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - plsMock, consMock, procsMock, pluginMock := newMockServices(t) - - pluginDispenser := pmock.NewDispenser(ctrl) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -234,13 +218,10 @@ func TestConnectorOrchestrator_Create_AddConnectorError(t *testing.T) { plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) - pluginMock.EXPECT(). - NewDispenser(gomock.Any(), conn.Plugin). - Return(pluginDispenser, nil) - pluginMock.EXPECT(). + connPluginMock.EXPECT(). ValidateSourceConfig( gomock.AssignableToTypeOf(ctxType), - pluginDispenser, + conn.Plugin, conn.Config.Settings, ).Return(nil) consMock.EXPECT(). @@ -259,10 +240,10 @@ func TestConnectorOrchestrator_Create_AddConnectorError(t *testing.T) { Return(nil, wantErr) // this is called in rollback consMock.EXPECT(). - Delete(gomock.AssignableToTypeOf(ctxType), conn.ID, pluginMock). + Delete(gomock.AssignableToTypeOf(ctxType), conn.ID, connPluginMock). 
Return(nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Create(ctx, connector.TypeSource, conn.Plugin, pl.ID, conn.Config) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) @@ -273,7 +254,7 @@ func TestConnectorOrchestrator_Delete_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -291,13 +272,13 @@ func TestConnectorOrchestrator_Delete_Success(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) consMock.EXPECT(). - Delete(gomock.AssignableToTypeOf(ctxType), conn.ID, pluginMock). + Delete(gomock.AssignableToTypeOf(ctxType), conn.ID, connPluginMock). Return(nil) plsMock.EXPECT(). RemoveConnector(gomock.AssignableToTypeOf(ctxType), pl.ID, conn.ID). Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Connectors.Delete(ctx, conn.ID) is.NoErr(err) } @@ -306,7 +287,7 @@ func TestConnectorOrchestrator_Delete_ConnectorNotExist(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) id := uuid.NewString() wantErr := cerrors.New("connector doesn't exist") @@ -314,7 +295,7 @@ func TestConnectorOrchestrator_Delete_ConnectorNotExist(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), id). 
Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Connectors.Delete(ctx, id) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) @@ -324,7 +305,7 @@ func TestConnectorOrchestrator_Delete_PipelineRunning(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -342,7 +323,7 @@ func TestConnectorOrchestrator_Delete_PipelineRunning(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Connectors.Delete(ctx, conn.ID) is.True(err != nil) is.Equal(pipeline.ErrPipelineRunning, err) @@ -352,7 +333,7 @@ func TestConnectorOrchestrator_Delete_ProcessorAttached(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -368,7 +349,7 @@ func TestConnectorOrchestrator_Delete_ProcessorAttached(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), conn.ID). 
Return(conn, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Connectors.Delete(ctx, conn.ID) is.True(err != nil) is.True(cerrors.Is(err, ErrConnectorHasProcessorsAttached)) @@ -378,7 +359,7 @@ func TestConnectorOrchestrator_Delete_Fail(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -397,10 +378,10 @@ func TestConnectorOrchestrator_Delete_Fail(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) consMock.EXPECT(). - Delete(gomock.AssignableToTypeOf(ctxType), conn.ID, pluginMock). + Delete(gomock.AssignableToTypeOf(ctxType), conn.ID, connPluginMock). Return(wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Connectors.Delete(ctx, conn.ID) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) @@ -410,7 +391,7 @@ func TestConnectorOrchestrator_Delete_RemoveConnectorFailed(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -430,7 +411,7 @@ func TestConnectorOrchestrator_Delete_RemoveConnectorFailed(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) consMock.EXPECT(). - Delete(gomock.AssignableToTypeOf(ctxType), conn.ID, pluginMock). + Delete(gomock.AssignableToTypeOf(ctxType), conn.ID, connPluginMock). Return(nil) plsMock.EXPECT(). 
RemoveConnector(gomock.AssignableToTypeOf(ctxType), pl.ID, conn.ID). @@ -448,7 +429,7 @@ func TestConnectorOrchestrator_Delete_RemoveConnectorFailed(t *testing.T) { ). Return(conn, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Connectors.Delete(ctx, conn.ID) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) @@ -458,10 +439,7 @@ func TestConnectorOrchestrator_Update_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - plsMock, consMock, procsMock, pluginMock := newMockServices(t) - - pluginDispenser := pmock.NewDispenser(ctrl) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -494,17 +472,14 @@ func TestConnectorOrchestrator_Update_Success(t *testing.T) { plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) - pluginMock.EXPECT(). - NewDispenser(gomock.Any(), conn.Plugin). - Return(pluginDispenser, nil) - pluginMock.EXPECT(). - ValidateSourceConfig(gomock.Any(), pluginDispenser, newConfig.Settings). + connPluginMock.EXPECT(). + ValidateSourceConfig(gomock.Any(), conn.Plugin, newConfig.Settings). Return(nil) consMock.EXPECT(). Update(gomock.AssignableToTypeOf(ctxType), conn.ID, newConfig). 
Return(want, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Update(ctx, conn.ID, newConfig) is.NoErr(err) is.Equal(got, want) @@ -514,7 +489,7 @@ func TestConnectorOrchestrator_Update_ConnectorNotExist(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) id := uuid.NewString() wantErr := cerrors.New("connector doesn't exist") @@ -522,7 +497,7 @@ func TestConnectorOrchestrator_Update_ConnectorNotExist(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), id). Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Update(ctx, id, connector.Config{}) is.True(got == nil) is.True(err != nil) @@ -533,7 +508,7 @@ func TestConnectorOrchestrator_Update_PipelineRunning(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -551,7 +526,7 @@ func TestConnectorOrchestrator_Update_PipelineRunning(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). 
Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Update(ctx, conn.ID, connector.Config{}) is.True(got == nil) is.True(err != nil) @@ -562,10 +537,7 @@ func TestConnectorOrchestrator_Update_Fail(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - plsMock, consMock, procsMock, pluginMock := newMockServices(t) - - pluginDispenser := pmock.NewDispenser(ctrl) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -584,17 +556,14 @@ func TestConnectorOrchestrator_Update_Fail(t *testing.T) { plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) - pluginMock.EXPECT(). - NewDispenser(gomock.Any(), conn.Plugin). - Return(pluginDispenser, nil) - pluginMock.EXPECT(). - ValidateDestinationConfig(gomock.Any(), pluginDispenser, conn.Config.Settings). + connPluginMock.EXPECT(). + ValidateDestinationConfig(gomock.Any(), conn.Plugin, conn.Config.Settings). Return(nil) consMock.EXPECT(). Update(gomock.AssignableToTypeOf(ctxType), conn.ID, connector.Config{}). Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Connectors.Update(ctx, conn.ID, connector.Config{}) is.True(got == nil) is.True(err != nil) diff --git a/pkg/orchestrator/mock/orchestrator.go b/pkg/orchestrator/mock/orchestrator.go index 40242efad..bba62e16a 100644 --- a/pkg/orchestrator/mock/orchestrator.go +++ b/pkg/orchestrator/mock/orchestrator.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. 
-// Source: github.com/conduitio/conduit/pkg/orchestrator (interfaces: PipelineService,ConnectorService,ProcessorService,PluginService) +// Source: github.com/conduitio/conduit/pkg/orchestrator (interfaces: PipelineService,ConnectorService,ProcessorService,ConnectorPluginService,ProcessorPluginService) // // Generated by this command: // -// mockgen -destination=mock/orchestrator.go -package=mock -mock_names=PipelineService=PipelineService,ConnectorService=ConnectorService,ProcessorService=ProcessorService,PluginService=PluginService . PipelineService,ConnectorService,ProcessorService,PluginService +// mockgen -destination=mock/orchestrator.go -package=mock -mock_names=PipelineService=PipelineService,ConnectorService=ConnectorService,ProcessorService=ProcessorService,ConnectorPluginService=ConnectorPluginService,ProcessorPluginService=ProcessorPluginService . PipelineService,ConnectorService,ProcessorService,ConnectorPluginService,ProcessorPluginService // // Package mock is a generated GoMock package. @@ -13,10 +13,11 @@ import ( context "context" reflect "reflect" + sdk "github.com/conduitio/conduit-processor-sdk" connector "github.com/conduitio/conduit/pkg/connector" log "github.com/conduitio/conduit/pkg/foundation/log" pipeline "github.com/conduitio/conduit/pkg/pipeline" - plugin "github.com/conduitio/conduit/pkg/plugin" + connector0 "github.com/conduitio/conduit/pkg/plugin/connector" processor "github.com/conduitio/conduit/pkg/processor" gomock "go.uber.org/mock/gomock" ) @@ -163,7 +164,7 @@ func (mr *PipelineServiceMockRecorder) RemoveProcessor(arg0, arg1, arg2 any) *go } // Start mocks base method. 
-func (m *PipelineService) Start(arg0 context.Context, arg1 pipeline.ConnectorFetcher, arg2 pipeline.ProcessorFetcher, arg3 pipeline.PluginDispenserFetcher, arg4 string) error { +func (m *PipelineService) Start(arg0 context.Context, arg1 pipeline.ConnectorFetcher, arg2 pipeline.ProcessorService, arg3 pipeline.PluginDispenserFetcher, arg4 string) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Start", arg0, arg1, arg2, arg3, arg4) ret0, _ := ret[0].(error) @@ -427,6 +428,21 @@ func (mr *ProcessorServiceMockRecorder) List(arg0 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*ProcessorService)(nil).List), arg0) } +// MakeRunnableProcessor mocks base method. +func (m *ProcessorService) MakeRunnableProcessor(arg0 context.Context, arg1 *processor.Instance) (*processor.RunnableProcessor, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "MakeRunnableProcessor", arg0, arg1) + ret0, _ := ret[0].(*processor.RunnableProcessor) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// MakeRunnableProcessor indicates an expected call of MakeRunnableProcessor. +func (mr *ProcessorServiceMockRecorder) MakeRunnableProcessor(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "MakeRunnableProcessor", reflect.TypeOf((*ProcessorService)(nil).MakeRunnableProcessor), arg0, arg1) +} + // Update mocks base method. func (m *ProcessorService) Update(arg0 context.Context, arg1 string, arg2 processor.Config) (*processor.Instance, error) { m.ctrl.T.Helper() @@ -442,61 +458,61 @@ func (mr *ProcessorServiceMockRecorder) Update(arg0, arg1, arg2 any) *gomock.Cal return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*ProcessorService)(nil).Update), arg0, arg1, arg2) } -// PluginService is a mock of PluginService interface. -type PluginService struct { +// ConnectorPluginService is a mock of ConnectorPluginService interface. 
+type ConnectorPluginService struct { ctrl *gomock.Controller - recorder *PluginServiceMockRecorder + recorder *ConnectorPluginServiceMockRecorder } -// PluginServiceMockRecorder is the mock recorder for PluginService. -type PluginServiceMockRecorder struct { - mock *PluginService +// ConnectorPluginServiceMockRecorder is the mock recorder for ConnectorPluginService. +type ConnectorPluginServiceMockRecorder struct { + mock *ConnectorPluginService } -// NewPluginService creates a new mock instance. -func NewPluginService(ctrl *gomock.Controller) *PluginService { - mock := &PluginService{ctrl: ctrl} - mock.recorder = &PluginServiceMockRecorder{mock} +// NewConnectorPluginService creates a new mock instance. +func NewConnectorPluginService(ctrl *gomock.Controller) *ConnectorPluginService { + mock := &ConnectorPluginService{ctrl: ctrl} + mock.recorder = &ConnectorPluginServiceMockRecorder{mock} return mock } // EXPECT returns an object that allows the caller to indicate expected use. -func (m *PluginService) EXPECT() *PluginServiceMockRecorder { +func (m *ConnectorPluginService) EXPECT() *ConnectorPluginServiceMockRecorder { return m.recorder } // List mocks base method. -func (m *PluginService) List(arg0 context.Context) (map[string]plugin.Specification, error) { +func (m *ConnectorPluginService) List(arg0 context.Context) (map[string]connector0.Specification, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "List", arg0) - ret0, _ := ret[0].(map[string]plugin.Specification) + ret0, _ := ret[0].(map[string]connector0.Specification) ret1, _ := ret[1].(error) return ret0, ret1 } // List indicates an expected call of List. 
-func (mr *PluginServiceMockRecorder) List(arg0 any) *gomock.Call { +func (mr *ConnectorPluginServiceMockRecorder) List(arg0 any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*PluginService)(nil).List), arg0) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*ConnectorPluginService)(nil).List), arg0) } // NewDispenser mocks base method. -func (m *PluginService) NewDispenser(arg0 log.CtxLogger, arg1 string) (plugin.Dispenser, error) { +func (m *ConnectorPluginService) NewDispenser(arg0 log.CtxLogger, arg1 string) (connector0.Dispenser, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "NewDispenser", arg0, arg1) - ret0, _ := ret[0].(plugin.Dispenser) + ret0, _ := ret[0].(connector0.Dispenser) ret1, _ := ret[1].(error) return ret0, ret1 } // NewDispenser indicates an expected call of NewDispenser. -func (mr *PluginServiceMockRecorder) NewDispenser(arg0, arg1 any) *gomock.Call { +func (mr *ConnectorPluginServiceMockRecorder) NewDispenser(arg0, arg1 any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewDispenser", reflect.TypeOf((*PluginService)(nil).NewDispenser), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewDispenser", reflect.TypeOf((*ConnectorPluginService)(nil).NewDispenser), arg0, arg1) } // ValidateDestinationConfig mocks base method. -func (m *PluginService) ValidateDestinationConfig(arg0 context.Context, arg1 plugin.Dispenser, arg2 map[string]string) error { +func (m *ConnectorPluginService) ValidateDestinationConfig(arg0 context.Context, arg1 string, arg2 map[string]string) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ValidateDestinationConfig", arg0, arg1, arg2) ret0, _ := ret[0].(error) @@ -504,13 +520,13 @@ func (m *PluginService) ValidateDestinationConfig(arg0 context.Context, arg1 plu } // ValidateDestinationConfig indicates an expected call of ValidateDestinationConfig. 
-func (mr *PluginServiceMockRecorder) ValidateDestinationConfig(arg0, arg1, arg2 any) *gomock.Call { +func (mr *ConnectorPluginServiceMockRecorder) ValidateDestinationConfig(arg0, arg1, arg2 any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateDestinationConfig", reflect.TypeOf((*PluginService)(nil).ValidateDestinationConfig), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateDestinationConfig", reflect.TypeOf((*ConnectorPluginService)(nil).ValidateDestinationConfig), arg0, arg1, arg2) } // ValidateSourceConfig mocks base method. -func (m *PluginService) ValidateSourceConfig(arg0 context.Context, arg1 plugin.Dispenser, arg2 map[string]string) error { +func (m *ConnectorPluginService) ValidateSourceConfig(arg0 context.Context, arg1 string, arg2 map[string]string) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "ValidateSourceConfig", arg0, arg1, arg2) ret0, _ := ret[0].(error) @@ -518,7 +534,60 @@ func (m *PluginService) ValidateSourceConfig(arg0 context.Context, arg1 plugin.D } // ValidateSourceConfig indicates an expected call of ValidateSourceConfig. -func (mr *PluginServiceMockRecorder) ValidateSourceConfig(arg0, arg1, arg2 any) *gomock.Call { +func (mr *ConnectorPluginServiceMockRecorder) ValidateSourceConfig(arg0, arg1, arg2 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateSourceConfig", reflect.TypeOf((*ConnectorPluginService)(nil).ValidateSourceConfig), arg0, arg1, arg2) +} + +// ProcessorPluginService is a mock of ProcessorPluginService interface. +type ProcessorPluginService struct { + ctrl *gomock.Controller + recorder *ProcessorPluginServiceMockRecorder +} + +// ProcessorPluginServiceMockRecorder is the mock recorder for ProcessorPluginService. +type ProcessorPluginServiceMockRecorder struct { + mock *ProcessorPluginService +} + +// NewProcessorPluginService creates a new mock instance. 
+func NewProcessorPluginService(ctrl *gomock.Controller) *ProcessorPluginService { + mock := &ProcessorPluginService{ctrl: ctrl} + mock.recorder = &ProcessorPluginServiceMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *ProcessorPluginService) EXPECT() *ProcessorPluginServiceMockRecorder { + return m.recorder +} + +// List mocks base method. +func (m *ProcessorPluginService) List(arg0 context.Context) (map[string]sdk.Specification, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "List", arg0) + ret0, _ := ret[0].(map[string]sdk.Specification) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// List indicates an expected call of List. +func (mr *ProcessorPluginServiceMockRecorder) List(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*ProcessorPluginService)(nil).List), arg0) +} + +// NewProcessor mocks base method. +func (m *ProcessorPluginService) NewProcessor(arg0 context.Context, arg1, arg2 string) (sdk.Processor, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "NewProcessor", arg0, arg1, arg2) + ret0, _ := ret[0].(sdk.Processor) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// NewProcessor indicates an expected call of NewProcessor. 
+func (mr *ProcessorPluginServiceMockRecorder) NewProcessor(arg0, arg1, arg2 any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateSourceConfig", reflect.TypeOf((*PluginService)(nil).ValidateSourceConfig), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewProcessor", reflect.TypeOf((*ProcessorPluginService)(nil).NewProcessor), arg0, arg1, arg2) } diff --git a/pkg/orchestrator/orchestrator.go b/pkg/orchestrator/orchestrator.go index 2c1316c57..211d6bf06 100644 --- a/pkg/orchestrator/orchestrator.go +++ b/pkg/orchestrator/orchestrator.go @@ -12,26 +12,28 @@ // See the License for the specific language governing permissions and // limitations under the License. -//go:generate mockgen -destination=mock/orchestrator.go -package=mock -mock_names=PipelineService=PipelineService,ConnectorService=ConnectorService,ProcessorService=ProcessorService,PluginService=PluginService . PipelineService,ConnectorService,ProcessorService,PluginService +//go:generate mockgen -destination=mock/orchestrator.go -package=mock -mock_names=PipelineService=PipelineService,ConnectorService=ConnectorService,ProcessorService=ProcessorService,ConnectorPluginService=ConnectorPluginService,ProcessorPluginService=ProcessorPluginService . 
PipelineService,ConnectorService,ProcessorService,ConnectorPluginService,ProcessorPluginService package orchestrator import ( "context" + processorSdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/connector" "github.com/conduitio/conduit/pkg/foundation/database" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/pipeline" - "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/processor" ) type Orchestrator struct { - Processors *ProcessorOrchestrator - Pipelines *PipelineOrchestrator - Connectors *ConnectorOrchestrator - Plugins *PluginOrchestrator + Processors *ProcessorOrchestrator + Pipelines *PipelineOrchestrator + Connectors *ConnectorOrchestrator + ConnectorPlugins *ConnectorPluginOrchestrator + ProcessorPlugins *ProcessorPluginOrchestrator } func NewOrchestrator( @@ -40,22 +42,25 @@ func NewOrchestrator( pipelines PipelineService, connectors ConnectorService, processors ProcessorService, - plugins PluginService, + connectorPlugins ConnectorPluginService, + processorPlugins ProcessorPluginService, ) *Orchestrator { b := base{ - db: db, - logger: logger.WithComponent("orchestrator"), - pipelines: pipelines, - connectors: connectors, - processors: processors, - plugins: plugins, + db: db, + logger: logger.WithComponent("orchestrator"), + pipelines: pipelines, + connectors: connectors, + processors: processors, + connectorPlugins: connectorPlugins, + processorPlugins: processorPlugins, } return &Orchestrator{ - Processors: (*ProcessorOrchestrator)(&b), - Pipelines: (*PipelineOrchestrator)(&b), - Connectors: (*ConnectorOrchestrator)(&b), - Plugins: (*PluginOrchestrator)(&b), + Processors: (*ProcessorOrchestrator)(&b), + Pipelines: (*PipelineOrchestrator)(&b), + Connectors: (*ConnectorOrchestrator)(&b), + ConnectorPlugins: (*ConnectorPluginOrchestrator)(&b), + ProcessorPlugins: 
(*ProcessorPluginOrchestrator)(&b), } } @@ -63,14 +68,15 @@ type base struct { db database.DB logger log.CtxLogger - pipelines PipelineService - connectors ConnectorService - processors ProcessorService - plugins PluginService + pipelines PipelineService + connectors ConnectorService + processors ProcessorService + connectorPlugins ConnectorPluginService + processorPlugins ProcessorPluginService } type PipelineService interface { - Start(ctx context.Context, connFetcher pipeline.ConnectorFetcher, procFetcher pipeline.ProcessorFetcher, pluginFetcher pipeline.PluginDispenserFetcher, pipelineID string) error + Start(ctx context.Context, connFetcher pipeline.ConnectorFetcher, procService pipeline.ProcessorService, pluginFetcher pipeline.PluginDispenserFetcher, pipelineID string) error // Stop initiates a stop of the given pipeline. The method does not wait for // the pipeline (and its nodes) to actually stop. // When force is false the pipeline will try to stop gracefully and drain @@ -107,14 +113,20 @@ type ConnectorService interface { type ProcessorService interface { List(ctx context.Context) map[string]*processor.Instance Get(ctx context.Context, id string) (*processor.Instance, error) - Create(ctx context.Context, id string, procType string, parent processor.Parent, cfg processor.Config, p processor.ProvisionType, condition string) (*processor.Instance, error) + Create(ctx context.Context, id string, plugin string, parent processor.Parent, cfg processor.Config, p processor.ProvisionType, condition string) (*processor.Instance, error) + MakeRunnableProcessor(ctx context.Context, i *processor.Instance) (*processor.RunnableProcessor, error) Update(ctx context.Context, id string, cfg processor.Config) (*processor.Instance, error) Delete(ctx context.Context, id string) error } -type PluginService interface { - List(ctx context.Context) (map[string]plugin.Specification, error) - NewDispenser(logger log.CtxLogger, name string) (plugin.Dispenser, error) - 
ValidateSourceConfig(ctx context.Context, d plugin.Dispenser, settings map[string]string) error - ValidateDestinationConfig(ctx context.Context, d plugin.Dispenser, settings map[string]string) error +type ConnectorPluginService interface { + List(ctx context.Context) (map[string]connectorPlugin.Specification, error) + NewDispenser(logger log.CtxLogger, name string) (connectorPlugin.Dispenser, error) + ValidateSourceConfig(ctx context.Context, name string, settings map[string]string) error + ValidateDestinationConfig(ctx context.Context, name string, settings map[string]string) error +} + +type ProcessorPluginService interface { + List(ctx context.Context) (map[string]processorSdk.Specification, error) + NewProcessor(ctx context.Context, pluginName string, id string) (processorSdk.Processor, error) } diff --git a/pkg/orchestrator/orchestrator_test.go b/pkg/orchestrator/orchestrator_test.go index 2803a71db..6743e30ec 100644 --- a/pkg/orchestrator/orchestrator_test.go +++ b/pkg/orchestrator/orchestrator_test.go @@ -22,17 +22,18 @@ import ( "testing" "time" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/connector" "github.com/conduitio/conduit/pkg/foundation/ctxutil" "github.com/conduitio/conduit/pkg/foundation/database/badger" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/orchestrator/mock" "github.com/conduitio/conduit/pkg/pipeline" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/builtin" - "github.com/conduitio/conduit/pkg/plugin/standalone" + conn_plugin "github.com/conduitio/conduit/pkg/plugin/connector" + conn_builtin "github.com/conduitio/conduit/pkg/plugin/connector/builtin" + conn_standalone "github.com/conduitio/conduit/pkg/plugin/connector/standalone" "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/processor/procbuiltin" "github.com/conduitio/conduit/pkg/record" 
"github.com/google/go-cmp/cmp" "github.com/matryer/is" @@ -44,13 +45,14 @@ import ( // a context is passed to a function. var ctxType = reflect.TypeOf((*context.Context)(nil)).Elem() -func newMockServices(t *testing.T) (*mock.PipelineService, *mock.ConnectorService, *mock.ProcessorService, *mock.PluginService) { +func newMockServices(t *testing.T) (*mock.PipelineService, *mock.ConnectorService, *mock.ProcessorService, *mock.ConnectorPluginService, *mock.ProcessorPluginService) { ctrl := gomock.NewController(t) return mock.NewPipelineService(ctrl), mock.NewConnectorService(ctrl), mock.NewProcessorService(ctrl), - mock.NewPluginService(ctrl) + mock.NewConnectorPluginService(ctrl), + mock.NewProcessorPluginService(ctrl) } func TestPipelineSimple(t *testing.T) { @@ -68,31 +70,36 @@ func TestPipelineSimple(t *testing.T) { is.NoErr(err) }) - pluginService := plugin.NewService( + connPluginService := conn_plugin.NewPluginService( logger, - builtin.NewRegistry(logger, builtin.DefaultDispenserFactories), - standalone.NewRegistry(logger, ""), + conn_builtin.NewRegistry(logger, conn_builtin.DefaultDispenserFactories), + conn_standalone.NewRegistry(logger, ""), ) + procPluginService := mock.NewProcessorPluginService(gomock.NewController(t)) + procPluginService.EXPECT(). + NewProcessor(gomock.Any(), "removereadat", gomock.Any()). + Return( + sdk.NewProcessorFunc(sdk.Specification{Name: "removereadat"}, func(ctx context.Context, r opencdc.Record) (opencdc.Record, error) { + delete(r.Metadata, record.MetadataReadAt) // read at is different every time, remove it + return r, nil + }), + nil, + ). 
+ // once when creating a processor instance (to verify the plugin exists) + // and once when building the pipeline nodes (to make a runnable processor) + Times(2) + orc := NewOrchestrator( db, logger, pipeline.NewService(logger, db), connector.NewService(logger, db, connector.NewPersister(logger, db, time.Second, 3)), - processor.NewService(logger, db, processor.GlobalBuilderRegistry), - pluginService, + processor.NewService(logger, db, procPluginService), + connPluginService, + procPluginService, ) - // add builtin processor for removing metadata - // TODO at the time of writing we don't have a processor for manipulating - // metadata, once we have it we can use it instead of adding our own - processor.GlobalBuilderRegistry.MustRegister("removereadat", func(config processor.Config) (processor.Interface, error) { - return procbuiltin.NewFuncWrapper(func(ctx context.Context, r record.Record) (record.Record, error) { - delete(r.Metadata, record.MetadataReadAt) // read at is different every time, remove it - return r, nil - }), nil - }) - // create a host pipeline pl, err := orc.Pipelines.Create(ctx, pipeline.Config{Name: "test pipeline"}) is.NoErr(err) diff --git a/pkg/orchestrator/pipelines.go b/pkg/orchestrator/pipelines.go index 63fda9ee5..2eb9ea7de 100644 --- a/pkg/orchestrator/pipelines.go +++ b/pkg/orchestrator/pipelines.go @@ -27,7 +27,7 @@ type PipelineOrchestrator base func (po *PipelineOrchestrator) Start(ctx context.Context, id string) error { // TODO lock pipeline - return po.pipelines.Start(ctx, po.connectors, po.processors, po.plugins, id) + return po.pipelines.Start(ctx, po.connectors, po.processors, po.connectorPlugins, id) } func (po *PipelineOrchestrator) Stop(ctx context.Context, id string, force bool) error { diff --git a/pkg/orchestrator/pipelines_test.go b/pkg/orchestrator/pipelines_test.go index 4fca4d76c..157173097 100644 --- a/pkg/orchestrator/pipelines_test.go +++ b/pkg/orchestrator/pipelines_test.go @@ -22,7 +22,6 @@ import ( 
"github.com/conduitio/conduit/pkg/foundation/database/inmemory" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/pipeline" - pmock "github.com/conduitio/conduit/pkg/plugin/mock" "github.com/google/uuid" "github.com/matryer/is" "go.uber.org/mock/gomock" @@ -32,16 +31,16 @@ func TestPipelineOrchestrator_Start_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). - Start(gomock.AssignableToTypeOf(ctxType), orc.Pipelines.connectors, orc.Pipelines.processors, orc.Pipelines.plugins, plBefore.ID). + Start(gomock.AssignableToTypeOf(ctxType), orc.Pipelines.connectors, orc.Pipelines.processors, orc.Pipelines.connectorPlugins, plBefore.ID). Return(nil) err := orc.Pipelines.Start(ctx, plBefore.ID) @@ -52,7 +51,7 @@ func TestPipelineOrchestrator_Start_Fail(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -61,10 +60,10 @@ func TestPipelineOrchestrator_Start_Fail(t *testing.T) { wantErr := cerrors.New("pipeline doesn't exist") plsMock.EXPECT(). - Start(gomock.AssignableToTypeOf(ctxType), consMock, procsMock, pluginMock, gomock.AssignableToTypeOf("")). + Start(gomock.AssignableToTypeOf(ctxType), consMock, procsMock, connPluginMock, gomock.AssignableToTypeOf("")). 
Return(wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Pipelines.Start(ctx, plBefore.ID) is.True(cerrors.Is(err, wantErr)) } @@ -73,14 +72,14 @@ func TestPipelineOrchestrator_Stop_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusRunning, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Stop(gomock.AssignableToTypeOf(ctxType), plBefore.ID, false). Return(nil) @@ -93,7 +92,7 @@ func TestPipelineOrchestrator_Stop_Fail(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -105,7 +104,7 @@ func TestPipelineOrchestrator_Stop_Fail(t *testing.T) { Stop(gomock.AssignableToTypeOf(ctxType), gomock.AssignableToTypeOf(""), true). 
Return(wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Pipelines.Stop(ctx, plBefore.ID, true) is.True(cerrors.Is(err, wantErr)) } @@ -114,7 +113,7 @@ func TestPipelineOrchestrator_Update_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -128,7 +127,7 @@ func TestPipelineOrchestrator_Update_Success(t *testing.T) { Config: pipeline.Config{Name: "new pipeline"}, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). Return(plBefore, nil) @@ -145,7 +144,7 @@ func TestPipelineOrchestrator_Update_PipelineRunning(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -154,7 +153,7 @@ func TestPipelineOrchestrator_Update_PipelineRunning(t *testing.T) { } newConfig := pipeline.Config{Name: "new pipeline"} - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). 
Return(plBefore, nil) @@ -168,7 +167,7 @@ func TestPipelineOrchestrator_Update_PipelineProvisionedByConfig(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -178,7 +177,7 @@ func TestPipelineOrchestrator_Update_PipelineProvisionedByConfig(t *testing.T) { } newConfig := pipeline.Config{Name: "new pipeline"} - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). Return(plBefore, nil) @@ -192,14 +191,14 @@ func TestPipelineOrchestrator_Delete_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). 
Return(plBefore, nil) @@ -215,14 +214,14 @@ func TestPipelineOrchestrator_Delete_PipelineRunning(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusRunning, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). Return(plBefore, nil) @@ -235,7 +234,7 @@ func TestPipelineOrchestrator_Delete_PipelineProvisionedByConfig(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -243,7 +242,7 @@ func TestPipelineOrchestrator_Delete_PipelineProvisionedByConfig(t *testing.T) { ProvisionedBy: pipeline.ProvisionTypeConfig, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). 
Return(plBefore, nil) @@ -256,7 +255,7 @@ func TestPipelineOrchestrator_Delete_PipelineHasProcessorsAttached(t *testing.T) is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -264,7 +263,7 @@ func TestPipelineOrchestrator_Delete_PipelineHasProcessorsAttached(t *testing.T) ProcessorIDs: []string{uuid.NewString()}, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). Return(plBefore, nil) @@ -277,7 +276,7 @@ func TestPipelineOrchestrator_Delete_PipelineHasConnectorsAttached(t *testing.T) is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -285,7 +284,7 @@ func TestPipelineOrchestrator_Delete_PipelineHasConnectorsAttached(t *testing.T) ConnectorIDs: []string{uuid.NewString()}, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). 
Return(plBefore, nil) @@ -298,10 +297,10 @@ func TestPipelineOrchestrator_Delete_PipelineDoesntExist(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) wantErr := cerrors.New("pipeline doesn't exist") - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), gomock.AssignableToTypeOf("")). Return(nil, wantErr) @@ -314,10 +313,7 @@ func TestPipelineOrchestrator_UpdateDLQ_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - plsMock, consMock, procsMock, pluginMock := newMockServices(t) - - pluginDispenser := pmock.NewDispenser(ctrl) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -343,15 +339,12 @@ func TestPipelineOrchestrator_UpdateDLQ_Success(t *testing.T) { DLQ: newDLQ, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). Return(plBefore, nil) - pluginMock.EXPECT(). - NewDispenser(gomock.Any(), newDLQ.Plugin). - Return(pluginDispenser, nil) - pluginMock.EXPECT(). - ValidateDestinationConfig(gomock.Any(), pluginDispenser, newDLQ.Settings). + connPluginMock.EXPECT(). + ValidateDestinationConfig(gomock.Any(), newDLQ.Plugin, newDLQ.Settings). Return(nil) plsMock.EXPECT(). UpdateDLQ(gomock.AssignableToTypeOf(ctxType), plBefore.ID, newDLQ). 
@@ -366,14 +359,14 @@ func TestPipelineOrchestrator_UpdateDLQ_PipelineRunning(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusRunning, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). Return(plBefore, nil) @@ -387,7 +380,7 @@ func TestPipelineOrchestrator_UpdateDLQ_PipelineProvisionedByConfig(t *testing.T is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -395,7 +388,7 @@ func TestPipelineOrchestrator_UpdateDLQ_PipelineProvisionedByConfig(t *testing.T ProvisionedBy: pipeline.ProvisionTypeConfig, } - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). 
Return(plBefore, nil) @@ -409,10 +402,7 @@ func TestConnectorOrchestrator_UpdateDLQ_InvalidConfig(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - plsMock, consMock, procsMock, pluginMock := newMockServices(t) - - pluginDispenser := pmock.NewDispenser(ctrl) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) plBefore := &pipeline.Instance{ ID: uuid.NewString(), @@ -427,15 +417,12 @@ func TestConnectorOrchestrator_UpdateDLQ_InvalidConfig(t *testing.T) { } wantErr := cerrors.New("invalid plugin config") - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) plsMock.EXPECT(). Get(gomock.AssignableToTypeOf(ctxType), plBefore.ID). Return(plBefore, nil) - pluginMock.EXPECT(). - NewDispenser(gomock.Any(), newDLQ.Plugin). - Return(pluginDispenser, nil) - pluginMock.EXPECT(). - ValidateDestinationConfig(gomock.Any(), pluginDispenser, newDLQ.Settings). + connPluginMock.EXPECT(). + ValidateDestinationConfig(gomock.Any(), newDLQ.Plugin, newDLQ.Settings). Return(wantErr) got, err := orc.Pipelines.UpdateDLQ(ctx, plBefore.ID, newDLQ) diff --git a/pkg/orchestrator/processor_plugins.go b/pkg/orchestrator/processor_plugins.go new file mode 100644 index 000000000..68e9d5951 --- /dev/null +++ b/pkg/orchestrator/processor_plugins.go @@ -0,0 +1,27 @@ +// Copyright © 2022 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package orchestrator + +import ( + "context" + + processorSdk "github.com/conduitio/conduit-processor-sdk" +) + +type ProcessorPluginOrchestrator base + +func (ps *ProcessorPluginOrchestrator) List(ctx context.Context) (map[string]processorSdk.Specification, error) { + return ps.processorPlugins.List(ctx) +} diff --git a/pkg/orchestrator/processors.go b/pkg/orchestrator/processors.go index 8e25cf479..9ccf95084 100644 --- a/pkg/orchestrator/processors.go +++ b/pkg/orchestrator/processors.go @@ -29,7 +29,7 @@ type ProcessorOrchestrator base func (p *ProcessorOrchestrator) Create( ctx context.Context, - procType string, + plugin string, parent processor.Parent, cfg processor.Config, cond string, @@ -58,7 +58,15 @@ func (p *ProcessorOrchestrator) Create( } // create processor and add to pipeline or connector - proc, err := p.processors.Create(ctx, uuid.NewString(), procType, parent, cfg, processor.ProvisionTypeAPI, cond) + proc, err := p.processors.Create( + ctx, + uuid.NewString(), + plugin, + parent, + cfg, + processor.ProvisionTypeAPI, + cond, + ) if err != nil { return nil, err } @@ -110,7 +118,7 @@ func (p *ProcessorOrchestrator) InspectIn( return nil, err } - return proc.Processor.InspectIn(ctx, proc.ID), nil + return proc.InspectIn(ctx, proc.ID), nil } func (p *ProcessorOrchestrator) InspectOut( @@ -122,7 +130,7 @@ func (p *ProcessorOrchestrator) InspectOut( return nil, err } - return proc.Processor.InspectOut(ctx, proc.ID), nil + return proc.InspectOut(ctx, proc.ID), nil } func (p *ProcessorOrchestrator) Get(ctx context.Context, id string) (*processor.Instance, error) { @@ -213,7 +221,7 @@ func (p *ProcessorOrchestrator) Delete(ctx context.Context, id string) error { return err } r.Append(func() error { - _, err = p.processors.Create(ctx, id, proc.Type, proc.Parent, proc.Config, processor.ProvisionTypeAPI, proc.Condition) + _, err = 
p.processors.Create(ctx, id, proc.Plugin, proc.Parent, proc.Config, processor.ProvisionTypeAPI, proc.Condition) return err }) diff --git a/pkg/orchestrator/processors_test.go b/pkg/orchestrator/processors_test.go index 3f7f80b4c..63aabe0cd 100644 --- a/pkg/orchestrator/processors_test.go +++ b/pkg/orchestrator/processors_test.go @@ -34,15 +34,15 @@ func TestProcessorOrchestrator_CreateOnPipeline_Success(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -60,7 +60,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_Success(t *testing.T) { Create( gomock.AssignableToTypeOf(ctxType), gomock.AssignableToTypeOf(""), - want.Type, + want.Plugin, want.Parent, want.Config, processor.ProvisionTypeAPI, @@ -71,8 +71,8 @@ func TestProcessorOrchestrator_CreateOnPipeline_Success(t *testing.T) { AddProcessor(gomock.AssignableToTypeOf(ctxType), pl.ID, want.ID). 
Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) - got, err := orc.Processors.Create(ctx, want.Type, want.Parent, want.Config, want.Condition) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) + got, err := orc.Processors.Create(ctx, want.Plugin, want.Parent, want.Config, want.Condition) is.NoErr(err) is.Equal(want, got) } @@ -82,7 +82,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_PipelineNotExist(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) parent := processor.Parent{ ID: uuid.NewString(), @@ -93,7 +93,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_PipelineNotExist(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), parent.ID). Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Create(ctx, "test-processor", parent, processor.Config{}, "") is.True(err != nil) is.True(cerrors.Is(err, wantErr)) // errors did not match @@ -105,7 +105,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_PipelineRunning(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -119,7 +119,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_PipelineRunning(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). 
Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Create(ctx, "test-processor", parent, processor.Config{}, "") is.True(err != nil) is.Equal(pipeline.ErrPipelineRunning, err) @@ -131,7 +131,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_PipelineProvisionedByConfig(t *t ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -146,7 +146,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_PipelineProvisionedByConfig(t *t Get(gomock.AssignableToTypeOf(ctxType), pl.ID). Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Create(ctx, "test-processor", parent, processor.Config{}, "") is.Equal(got, nil) is.True(err != nil) @@ -158,7 +158,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_CreateProcessorError(t *testing. ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -185,7 +185,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_CreateProcessorError(t *testing. ). 
Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Create(ctx, "test-processor", parent, processor.Config{}, "") is.True(err != nil) is.True(cerrors.Is(err, wantErr)) // errors did not match @@ -197,15 +197,15 @@ func TestProcessorOrchestrator_CreateOnPipeline_AddProcessorError(t *testing.T) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } proc := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -224,7 +224,7 @@ func TestProcessorOrchestrator_CreateOnPipeline_AddProcessorError(t *testing.T) Create( gomock.AssignableToTypeOf(ctxType), gomock.AssignableToTypeOf(""), - proc.Type, + proc.Plugin, proc.Parent, proc.Config, processor.ProvisionTypeAPI, @@ -239,8 +239,8 @@ func TestProcessorOrchestrator_CreateOnPipeline_AddProcessorError(t *testing.T) Delete(gomock.AssignableToTypeOf(ctxType), proc.ID). 
Return(nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) - got, err := orc.Processors.Create(ctx, proc.Type, proc.Parent, proc.Config, proc.Condition) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) + got, err := orc.Processors.Create(ctx, proc.Plugin, proc.Parent, proc.Config, proc.Condition) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) // errors did not match is.True(got == nil) @@ -251,7 +251,7 @@ func TestProcessorOrchestrator_CreateOnConnector_Success(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -262,8 +262,8 @@ func TestProcessorOrchestrator_CreateOnConnector_Success(t *testing.T) { PipelineID: pl.ID, } want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: conn.ID, Type: processor.ParentTypeConnector, @@ -284,7 +284,7 @@ func TestProcessorOrchestrator_CreateOnConnector_Success(t *testing.T) { Create( gomock.AssignableToTypeOf(ctxType), gomock.AssignableToTypeOf(""), - want.Type, + want.Plugin, want.Parent, want.Config, processor.ProvisionTypeAPI, @@ -295,8 +295,8 @@ func TestProcessorOrchestrator_CreateOnConnector_Success(t *testing.T) { AddProcessor(gomock.AssignableToTypeOf(ctxType), conn.ID, want.ID). 
Return(conn, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) - got, err := orc.Processors.Create(ctx, want.Type, want.Parent, want.Config, want.Condition) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) + got, err := orc.Processors.Create(ctx, want.Plugin, want.Parent, want.Config, want.Condition) is.NoErr(err) is.Equal(want, got) } @@ -306,7 +306,7 @@ func TestProcessorOrchestrator_CreateOnConnector_ConnectorNotExist(t *testing.T) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) parent := processor.Parent{ ID: uuid.NewString(), @@ -317,7 +317,7 @@ func TestProcessorOrchestrator_CreateOnConnector_ConnectorNotExist(t *testing.T) Get(gomock.AssignableToTypeOf(ctxType), parent.ID). Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Create(ctx, "test-processor", parent, processor.Config{}, "") is.True(err != nil) is.True(cerrors.Is(err, wantErr)) // errors did not match @@ -329,15 +329,15 @@ func TestProcessorOrchestrator_UpdateOnPipeline_Success(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } before := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -350,8 +350,8 @@ func TestProcessorOrchestrator_UpdateOnPipeline_Success(t *testing.T) { Settings: 
map[string]string{"foo2": "bar2"}, } want := &processor.Instance{ - ID: before.ID, - Type: "test-processor", + ID: before.ID, + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -371,7 +371,7 @@ func TestProcessorOrchestrator_UpdateOnPipeline_Success(t *testing.T) { Update(gomock.AssignableToTypeOf(ctxType), want.ID, want.Config). Return(want, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Update(ctx, before.ID, newConfig) is.NoErr(err) is.Equal(want, got) @@ -382,15 +382,15 @@ func TestProcessorOrchestrator_UpdateOnPipeline_ProcessorNotExist(t *testing.T) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } before := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -408,7 +408,7 @@ func TestProcessorOrchestrator_UpdateOnPipeline_ProcessorNotExist(t *testing.T) Get(gomock.AssignableToTypeOf(ctxType), before.ID). 
Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Update(ctx, before.ID, newConfig) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) // errors did not match") @@ -420,15 +420,15 @@ func TestProcessorOrchestrator_UpdateOnPipeline_PipelineRunning(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusRunning, } before := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -448,7 +448,7 @@ func TestProcessorOrchestrator_UpdateOnPipeline_PipelineRunning(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). 
Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Update(ctx, before.ID, newConfig) is.True(err != nil) is.Equal(pipeline.ErrPipelineRunning, err) @@ -460,7 +460,7 @@ func TestProcessorOrchestrator_UpdateOnPipeline_ProcessorProvisionedByConfig(t * ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -468,8 +468,8 @@ func TestProcessorOrchestrator_UpdateOnPipeline_ProcessorProvisionedByConfig(t * ProvisionedBy: pipeline.ProvisionTypeConfig, } before := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -487,7 +487,7 @@ func TestProcessorOrchestrator_UpdateOnPipeline_ProcessorProvisionedByConfig(t * Get(gomock.AssignableToTypeOf(ctxType), before.ID). 
Return(before, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Update(ctx, before.ID, newConfig) is.Equal(got, nil) is.True(err != nil) @@ -499,15 +499,15 @@ func TestProcessorOrchestrator_UpdateOnPipeline_UpdateFail(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } before := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -520,8 +520,8 @@ func TestProcessorOrchestrator_UpdateOnPipeline_UpdateFail(t *testing.T) { Settings: map[string]string{"foo2": "bar2"}, } want := &processor.Instance{ - ID: before.ID, - Type: "test-processor", + ID: before.ID, + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -542,7 +542,7 @@ func TestProcessorOrchestrator_UpdateOnPipeline_UpdateFail(t *testing.T) { Update(gomock.AssignableToTypeOf(ctxType), want.ID, want.Config). 
Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Update(ctx, before.ID, newConfig) is.True(err != nil) is.Equal(wantErr, err) @@ -554,12 +554,12 @@ func TestProcessorOrchestrator_UpdateOnConnector_ConnectorNotExist(t *testing.T) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) connID := uuid.NewString() want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: connID, Type: processor.ParentTypeConnector, @@ -574,7 +574,7 @@ func TestProcessorOrchestrator_UpdateOnConnector_ConnectorNotExist(t *testing.T) Get(gomock.AssignableToTypeOf(ctxType), connID). Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) got, err := orc.Processors.Update(ctx, want.ID, processor.Config{}) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) // errors did not match @@ -586,15 +586,15 @@ func TestProcessorOrchestrator_DeleteOnPipeline_Success(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -617,7 +617,7 @@ func 
TestProcessorOrchestrator_DeleteOnPipeline_Success(t *testing.T) { RemoveProcessor(gomock.AssignableToTypeOf(ctxType), pl.ID, want.ID). Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Processors.Delete(ctx, want.ID) is.NoErr(err) } @@ -627,15 +627,15 @@ func TestProcessorOrchestrator_DeleteOnPipeline_ProcessorNotExist(t *testing.T) ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -650,7 +650,7 @@ func TestProcessorOrchestrator_DeleteOnPipeline_ProcessorNotExist(t *testing.T) Get(gomock.AssignableToTypeOf(ctxType), want.ID). 
Return(nil, wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Processors.Delete(ctx, want.ID) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) // errors did not match @@ -661,15 +661,15 @@ func TestProcessorOrchestrator_DeleteOnPipeline_PipelineRunning(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusRunning, } want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -686,7 +686,7 @@ func TestProcessorOrchestrator_DeleteOnPipeline_PipelineRunning(t *testing.T) { Get(gomock.AssignableToTypeOf(ctxType), pl.ID). 
Return(pl, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Processors.Delete(ctx, want.ID) is.True(err != nil) is.Equal(pipeline.ErrPipelineRunning, err) @@ -697,15 +697,15 @@ func TestProcessorOrchestrator_DeleteOnPipeline_Fail(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -726,7 +726,7 @@ func TestProcessorOrchestrator_DeleteOnPipeline_Fail(t *testing.T) { Delete(gomock.AssignableToTypeOf(ctxType), want.ID). 
Return(wantErr) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Processors.Delete(ctx, want.ID) is.True(err != nil) is.True(cerrors.Is(err, wantErr)) // errors did not match @@ -737,15 +737,15 @@ func TestProcessorOrchestrator_DeleteOnPipeline_RemoveProcessorFail(t *testing.T ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), Status: pipeline.StatusSystemStopped, } want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: pl.ID, Type: processor.ParentTypePipeline, @@ -774,7 +774,7 @@ func TestProcessorOrchestrator_DeleteOnPipeline_RemoveProcessorFail(t *testing.T Create( gomock.AssignableToTypeOf(ctxType), want.ID, - want.Type, + want.Plugin, want.Parent, want.Config, processor.ProvisionTypeAPI, @@ -782,7 +782,7 @@ func TestProcessorOrchestrator_DeleteOnPipeline_RemoveProcessorFail(t *testing.T ). 
Return(want, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Processors.Delete(ctx, want.ID) is.True(err != nil) } @@ -792,7 +792,7 @@ func TestProcessorOrchestrator_DeleteOnConnector_Fail(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - plsMock, consMock, procsMock, pluginMock := newMockServices(t) + plsMock, consMock, procsMock, connPluginMock, procPluginMock := newMockServices(t) pl := &pipeline.Instance{ ID: uuid.NewString(), @@ -803,8 +803,8 @@ func TestProcessorOrchestrator_DeleteOnConnector_Fail(t *testing.T) { PipelineID: pl.ID, } want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: conn.ID, Type: processor.ParentTypeConnector, @@ -836,7 +836,7 @@ func TestProcessorOrchestrator_DeleteOnConnector_Fail(t *testing.T) { Create( gomock.AssignableToTypeOf(ctxType), want.ID, - want.Type, + want.Plugin, want.Parent, want.Config, processor.ProvisionTypeAPI, @@ -844,7 +844,7 @@ func TestProcessorOrchestrator_DeleteOnConnector_Fail(t *testing.T) { ). 
Return(want, nil) - orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, pluginMock) + orc := NewOrchestrator(db, log.Nop(), plsMock, consMock, procsMock, connPluginMock, procPluginMock) err := orc.Processors.Delete(ctx, want.ID) is.True(err != nil) } diff --git a/pkg/pipeline/lifecycle.go b/pkg/pipeline/lifecycle.go index 335ffd7dd..52e0583ac 100644 --- a/pkg/pipeline/lifecycle.go +++ b/pkg/pipeline/lifecycle.go @@ -30,7 +30,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/metrics/measure" "github.com/conduitio/conduit/pkg/foundation/multierror" "github.com/conduitio/conduit/pkg/pipeline/stream" - "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/processor" "gopkg.in/tomb.v2" ) @@ -41,21 +41,22 @@ type ConnectorFetcher interface { Create(ctx context.Context, id string, t connector.Type, plugin string, pipelineID string, cfg connector.Config, p connector.ProvisionType) (*connector.Instance, error) } -// ProcessorFetcher can fetch a processor instance. -type ProcessorFetcher interface { +// ProcessorService can fetch a processor instance and make a runnable processor from it. +type ProcessorService interface { Get(ctx context.Context, id string) (*processor.Instance, error) + MakeRunnableProcessor(ctx context.Context, i *processor.Instance) (*processor.RunnableProcessor, error) } // PluginDispenserFetcher can fetch a plugin. type PluginDispenserFetcher interface { - NewDispenser(logger log.CtxLogger, name string) (plugin.Dispenser, error) + NewDispenser(logger log.CtxLogger, name string) (connectorPlugin.Dispenser, error) } // Run runs pipelines that had the running state in store. 
func (s *Service) Run( ctx context.Context, connFetcher ConnectorFetcher, - procFetcher ProcessorFetcher, + procService ProcessorService, pluginFetcher PluginDispenserFetcher, ) error { var err error @@ -64,7 +65,7 @@ func (s *Service) Run( // run pipelines that are in the StatusSystemStopped state for _, instance := range s.instances { if instance.Status == StatusSystemStopped { - startErr := s.Start(ctx, connFetcher, procFetcher, pluginFetcher, instance.ID) + startErr := s.Start(ctx, connFetcher, procService, pluginFetcher, instance.ID) if startErr != nil { // try to start remaining pipelines and gather errors err = multierror.Append(err, startErr) @@ -80,7 +81,7 @@ func (s *Service) Run( func (s *Service) Start( ctx context.Context, connFetcher ConnectorFetcher, - procFetcher ProcessorFetcher, + procService ProcessorService, pluginFetcher PluginDispenserFetcher, pipelineID string, ) error { @@ -95,7 +96,7 @@ func (s *Service) Start( s.logger.Debug(ctx).Str(log.PipelineIDField, pl.ID).Msg("starting pipeline") s.logger.Trace(ctx).Str(log.PipelineIDField, pl.ID).Msg("building nodes") - nodes, err := s.buildNodes(ctx, connFetcher, procFetcher, pluginFetcher, pl) + nodes, err := s.buildNodes(ctx, connFetcher, procService, pluginFetcher, pl) if err != nil { return cerrors.Errorf("could not build nodes for pipeline %s: %w", pl.ID, err) } @@ -226,7 +227,7 @@ func (s *Service) waitInternal() error { func (s *Service) buildNodes( ctx context.Context, connFetcher ConnectorFetcher, - procFetcher ProcessorFetcher, + procService ProcessorService, pluginFetcher PluginDispenserFetcher, pl *Instance, ) ([]stream.Node, error) { @@ -234,7 +235,7 @@ func (s *Service) buildNodes( fanIn := stream.FaninNode{Name: "fanin"} fanOut := stream.FanoutNode{Name: "fanout"} - sourceNodes, err := s.buildSourceNodes(ctx, connFetcher, procFetcher, pluginFetcher, pl, &fanIn) + sourceNodes, err := s.buildSourceNodes(ctx, connFetcher, procService, pluginFetcher, pl, &fanIn) if err != nil { return 
nil, cerrors.Errorf("could not build source nodes: %w", err) } @@ -242,12 +243,12 @@ func (s *Service) buildNodes( return nil, cerrors.New("can't build pipeline without any source connectors") } - processorNodes, err := s.buildProcessorNodes(ctx, procFetcher, pl, pl.ProcessorIDs, &fanIn, &fanOut) + processorNodes, err := s.buildProcessorNodes(ctx, procService, pl, pl.ProcessorIDs, &fanIn, &fanOut) if err != nil { return nil, cerrors.Errorf("could not build processor nodes: %w", err) } - destinationNodes, err := s.buildDestinationNodes(ctx, connFetcher, procFetcher, pluginFetcher, pl, &fanOut) + destinationNodes, err := s.buildDestinationNodes(ctx, connFetcher, procService, pluginFetcher, pl, &fanOut) if err != nil { return nil, cerrors.Errorf("could not build destination nodes: %w", err) } @@ -275,7 +276,7 @@ func (s *Service) buildNodes( func (s *Service) buildProcessorNodes( ctx context.Context, - procFetcher ProcessorFetcher, + procService ProcessorService, pl *Instance, processorIDs []string, first stream.PubNode, @@ -285,16 +286,21 @@ func (s *Service) buildProcessorNodes( prev := first for _, procID := range processorIDs { - proc, err := procFetcher.Get(ctx, procID) + instance, err := procService.Get(ctx, procID) if err != nil { return nil, cerrors.Errorf("could not fetch processor: %w", err) } + runnableProc, err := procService.MakeRunnableProcessor(ctx, instance) + if err != nil { + return nil, err + } + var node stream.PubSubNode - if proc.Config.Workers > 1 { - node = s.buildParallelProcessorNode(pl, proc) + if instance.Config.Workers > 1 { + node = s.buildParallelProcessorNode(pl, runnableProc) } else { - node = s.buildProcessorNode(pl, proc) + node = s.buildProcessorNode(pl, runnableProc) } node.Sub(prev.Pub()) @@ -309,7 +315,7 @@ func (s *Service) buildProcessorNodes( func (s *Service) buildParallelProcessorNode( pl *Instance, - proc *processor.Instance, + proc *processor.RunnableProcessor, ) *stream.ParallelNode { return &stream.ParallelNode{ Name: 
proc.ID + "-parallel", @@ -324,19 +330,19 @@ func (s *Service) buildParallelProcessorNode( func (s *Service) buildProcessorNode( pl *Instance, - proc *processor.Instance, + proc *processor.RunnableProcessor, ) *stream.ProcessorNode { return &stream.ProcessorNode{ Name: proc.ID, - Processor: proc.Processor, - ProcessorTimer: measure.ProcessorExecutionDurationTimer.WithValues(pl.Config.Name, proc.Type), + Processor: proc, + ProcessorTimer: measure.ProcessorExecutionDurationTimer.WithValues(pl.Config.Name, proc.Plugin), } } func (s *Service) buildSourceNodes( ctx context.Context, connFetcher ConnectorFetcher, - procFetcher ProcessorFetcher, + procService ProcessorService, pluginFetcher PluginDispenserFetcher, pl *Instance, next stream.SubNode, @@ -377,7 +383,7 @@ func (s *Service) buildSourceNodes( metricsNode := s.buildMetricsNode(pl, instance) metricsNode.Sub(ackerNode.Pub()) - procNodes, err := s.buildProcessorNodes(ctx, procFetcher, pl, instance.ProcessorIDs, metricsNode, next) + procNodes, err := s.buildProcessorNodes(ctx, procService, pl, instance.ProcessorIDs, metricsNode, next) if err != nil { return nil, cerrors.Errorf("could not build processor nodes for connector %s: %w", instance.ID, err) } @@ -475,7 +481,7 @@ func (s *Service) buildDestinationAckerNode( func (s *Service) buildDestinationNodes( ctx context.Context, connFetcher ConnectorFetcher, - procFetcher ProcessorFetcher, + procService ProcessorService, pluginFetcher PluginDispenserFetcher, pl *Instance, prev stream.PubNode, @@ -511,7 +517,7 @@ func (s *Service) buildDestinationNodes( destinationNode.Sub(metricsNode.Pub()) ackerNode.Sub(destinationNode.Pub()) - connNodes, err := s.buildProcessorNodes(ctx, procFetcher, pl, instance.ProcessorIDs, prev, metricsNode) + connNodes, err := s.buildProcessorNodes(ctx, procService, pl, instance.ProcessorIDs, prev, metricsNode) if err != nil { return nil, cerrors.Errorf("could not build processor nodes for connector %s: %w", instance.ID, err) } diff --git 
a/pkg/pipeline/lifecycle_test.go b/pkg/pipeline/lifecycle_test.go index 63561d6ac..93b81de25 100644 --- a/pkg/pipeline/lifecycle_test.go +++ b/pkg/pipeline/lifecycle_test.go @@ -30,7 +30,8 @@ import ( "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/pipeline/stream" "github.com/conduitio/conduit/pkg/plugin" - pmock "github.com/conduitio/conduit/pkg/plugin/mock" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" + pmock "github.com/conduitio/conduit/pkg/plugin/connector/mock" "github.com/conduitio/conduit/pkg/processor" "github.com/conduitio/conduit/pkg/record" "github.com/google/uuid" @@ -561,9 +562,13 @@ func (tcf testConnectorFetcher) Create(context.Context, string, connector.Type, return tcf[testDLQID], nil } -// testProcessorFetcher fulfills the ProcessorFetcher interface. +// testProcessorFetcher fulfills the ProcessorService interface. type testProcessorFetcher map[string]*processor.Instance +func (tpf testProcessorFetcher) MakeRunnableProcessor(context.Context, *processor.Instance) (*processor.RunnableProcessor, error) { + return nil, cerrors.New("not implemented") +} + func (tpf testProcessorFetcher) Get(_ context.Context, id string) (*processor.Instance, error) { proc, ok := tpf[id] if !ok { @@ -573,9 +578,9 @@ func (tpf testProcessorFetcher) Get(_ context.Context, id string) (*processor.In } // testPluginFetcher fulfills the PluginFetcher interface. 
-type testPluginFetcher map[string]plugin.Dispenser +type testPluginFetcher map[string]connectorPlugin.Dispenser -func (tpf testPluginFetcher) NewDispenser(_ log.CtxLogger, name string) (plugin.Dispenser, error) { +func (tpf testPluginFetcher) NewDispenser(_ log.CtxLogger, name string) (connectorPlugin.Dispenser, error) { plug, ok := tpf[name] if !ok { return nil, plugin.ErrPluginNotFound diff --git a/pkg/pipeline/stream/mock/processor.go b/pkg/pipeline/stream/mock/processor.go new file mode 100644 index 000000000..f6d3a6bbe --- /dev/null +++ b/pkg/pipeline/stream/mock/processor.go @@ -0,0 +1,84 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/conduitio/conduit/pkg/pipeline/stream (interfaces: Processor) +// +// Generated by this command: +// +// mockgen -destination=mock/processor.go -package=mock -mock_names=Processor=Processor . Processor +// + +// Package mock is a generated GoMock package. +package mock + +import ( + context "context" + reflect "reflect" + + opencdc "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + gomock "go.uber.org/mock/gomock" +) + +// Processor is a mock of Processor interface. +type Processor struct { + ctrl *gomock.Controller + recorder *ProcessorMockRecorder +} + +// ProcessorMockRecorder is the mock recorder for Processor. +type ProcessorMockRecorder struct { + mock *Processor +} + +// NewProcessor creates a new mock instance. +func NewProcessor(ctrl *gomock.Controller) *Processor { + mock := &Processor{ctrl: ctrl} + mock.recorder = &ProcessorMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *Processor) EXPECT() *ProcessorMockRecorder { + return m.recorder +} + +// Open mocks base method. +func (m *Processor) Open(arg0 context.Context) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Open", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Open indicates an expected call of Open. 
+func (mr *ProcessorMockRecorder) Open(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*Processor)(nil).Open), arg0) +} + +// Process mocks base method. +func (m *Processor) Process(arg0 context.Context, arg1 []opencdc.Record) []sdk.ProcessedRecord { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Process", arg0, arg1) + ret0, _ := ret[0].([]sdk.ProcessedRecord) + return ret0 +} + +// Process indicates an expected call of Process. +func (mr *ProcessorMockRecorder) Process(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Process", reflect.TypeOf((*Processor)(nil).Process), arg0, arg1) +} + +// Teardown mocks base method. +func (m *Processor) Teardown(arg0 context.Context) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Teardown", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Teardown indicates an expected call of Teardown. +func (mr *ProcessorMockRecorder) Teardown(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Teardown", reflect.TypeOf((*Processor)(nil).Teardown), arg0) +} diff --git a/pkg/pipeline/stream/parallel_test.go b/pkg/pipeline/stream/parallel_test.go index a54e8d172..037617e82 100644 --- a/pkg/pipeline/stream/parallel_test.go +++ b/pkg/pipeline/stream/parallel_test.go @@ -22,12 +22,14 @@ import ( "testing" "time" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/foundation/cchan" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/csync" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/foundation/metrics/noop" - pmock "github.com/conduitio/conduit/pkg/processor/mock" + "github.com/conduitio/conduit/pkg/pipeline/stream/mock" "github.com/conduitio/conduit/pkg/record" 
"github.com/matryer/is" "go.uber.org/mock/gomock" @@ -559,14 +561,28 @@ func TestParallelNode_Processor(t *testing.T) { const msgCount = 1000 // create a dummy processor - proc := pmock.NewProcessor(ctrl) - proc.EXPECT().Process(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, r record.Record) (record.Record, error) { - current := atomic.AddInt32(&workersCurrent, 1) - atomic.CompareAndSwapInt32(&workersHighWatermark, current-1, current) - time.Sleep(time.Millisecond) // sleep to let other workers catch up - atomic.AddInt32(&workersCurrent, -1) - return r, nil - }).Times(msgCount) + proc := mock.NewProcessor(ctrl) + proc.EXPECT(). + Open(gomock.Any()). + Times(workerCount) + proc.EXPECT(). + Process(gomock.Any(), gomock.Any()). + DoAndReturn(func(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + current := atomic.AddInt32(&workersCurrent, 1) + atomic.CompareAndSwapInt32(&workersHighWatermark, current-1, current) + time.Sleep(time.Millisecond) // sleep to let other workers catch up + atomic.AddInt32(&workersCurrent, -1) + + out := make([]sdk.ProcessedRecord, len(records)) + for i, r := range records { + out[i] = sdk.SingleRecord(r) + } + + return out + }).Times(msgCount) + proc.EXPECT(). + Teardown(gomock.Any()). + Times(workerCount) newProcNode := func(i int) PubSubNode { return &ProcessorNode{ diff --git a/pkg/pipeline/stream/processor.go b/pkg/pipeline/stream/processor.go index 2ffc56cf2..bea278a55 100644 --- a/pkg/pipeline/stream/processor.go +++ b/pkg/pipeline/stream/processor.go @@ -12,27 +12,41 @@ // See the License for the specific language governing permissions and // limitations under the License. +//go:generate mockgen -destination=mock/processor.go -package=mock -mock_names=Processor=Processor . 
Processor + package stream import ( + "bytes" "context" "time" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/foundation/metrics" - "github.com/conduitio/conduit/pkg/processor" + "github.com/conduitio/conduit/pkg/record" ) type ProcessorNode struct { Name string - Processor processor.Interface + Processor Processor ProcessorTimer metrics.Timer base pubSubNodeBase logger log.CtxLogger } +type Processor interface { + // Open configures and opens a processor plugin + Open(ctx context.Context) error + Process(context.Context, []opencdc.Record) []sdk.ProcessedRecord + // Teardown tears down a processor plugin. + // In case of standalone plugins, that means stopping the WASM module. + Teardown(context.Context) error +} + func (n *ProcessorNode) ID() string { return n.Name } @@ -42,8 +56,24 @@ func (n *ProcessorNode) Run(ctx context.Context) error { if err != nil { return err } - defer cleanup() + // Teardown needs to be called even if Open() fails + // (to mark the processor as not running) + defer func() { + n.logger.Debug(ctx).Msg("tearing down processor") + tdErr := n.Processor.Teardown(ctx) + err = cerrors.LogOrReplace(err, tdErr, func() { + n.logger.Err(ctx, tdErr).Msg("could not tear down processor") + }) + }() + + n.logger.Debug(ctx).Msg("opening processor") + err = n.Processor.Open(ctx) + if err != nil { + n.logger.Err(ctx, err).Msg("failed opening processor") + return cerrors.Errorf("couldn't open processor: %w", err) + } + for { msg, err := trigger() if err != nil || msg == nil { @@ -51,31 +81,40 @@ func (n *ProcessorNode) Run(ctx context.Context) error { } executeTime := time.Now() - rec, err := n.Processor.Process(msg.Ctx, msg.Record) + recsIn := []opencdc.Record{msg.Record.ToOpenCDC()} + recsOut := n.Processor.Process(msg.Ctx, recsIn) 
n.ProcessorTimer.Update(time.Since(executeTime)) - if err != nil { - // Check for Skipped records - switch err { - case processor.ErrSkipRecord: - // NB: Ack skipped messages since they've been correctly handled - err := msg.Ack() - if err != nil { - return cerrors.Errorf("failed to ack skipped message: %w", err) - } - default: - err = msg.Nack(err, n.ID()) - if err != nil { - return cerrors.Errorf("error executing processor: %w", err) - } + + if len(recsIn) != len(recsOut) { + err := cerrors.Errorf("processor was given %v record(s), but returned %v", len(recsIn), len(recsOut)) + // todo when processors can accept multiple records + // make sure that we ack as many records as possible + // (here we simply nack all of them, which is always only one) + if nackErr := msg.Nack(err, n.ID()); nackErr != nil { + return nackErr } - // error was handled successfully, we recovered - continue + return err } - msg.Record = rec - err = n.base.Send(ctx, n.logger, msg) - if err != nil { - return msg.Nack(err, n.ID()) + switch v := recsOut[0].(type) { + case sdk.SingleRecord: + err := n.handleSingleRecord(ctx, msg, v) + // handleSingleRecord already checks the nack error (if any) + // so it's enough to just return the error from it + if err != nil { + return err + } + case sdk.FilterRecord: + // NB: Ack skipped messages since they've been correctly handled + err := msg.Ack() + if err != nil { + return cerrors.Errorf("failed to ack skipped message: %w", err) + } + case sdk.ErrorRecord: + err = msg.Nack(v.Error, n.ID()) + if err != nil { + return cerrors.Errorf("error executing processor: %w", err) + } } } } @@ -91,3 +130,34 @@ func (n *ProcessorNode) Pub() <-chan *Message { func (n *ProcessorNode) SetLogger(logger log.CtxLogger) { n.logger = logger } + +// handleSingleRecord handles a sdk.SingleRecord by checking the position, +// setting the new record on the message and sending it downstream. 
+// If there are any errors, the method nacks the message and returns +// an appropriate error (if nack-ing failed, it returns the nack error) +func (n *ProcessorNode) handleSingleRecord(ctx context.Context, msg *Message, v sdk.SingleRecord) error { + recOut := record.FromOpenCDC(opencdc.Record(v)) + if !bytes.Equal(recOut.Position, msg.Record.Position) { + err := cerrors.Errorf( + "processor changed position from '%v' to '%v' "+ + "(not allowed because source connector cannot correctly acknowledge messages)", + msg.Record.Position, + recOut.Position, + ) + + if nackErr := msg.Nack(err, n.ID()); nackErr != nil { + return nackErr + } + // correctly nacked (sent to the DLQ) + // so we return the "original" error here + return err + } + + msg.Record = recOut + err := n.base.Send(ctx, n.logger, msg) + if err != nil { + return msg.Nack(err, n.ID()) + } + + return nil +} diff --git a/pkg/pipeline/stream/processor_test.go b/pkg/pipeline/stream/processor_test.go index 10100668f..e0656b809 100644 --- a/pkg/pipeline/stream/processor_test.go +++ b/pkg/pipeline/stream/processor_test.go @@ -19,10 +19,11 @@ import ( "sync" "testing" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/metrics/noop" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/processor/mock" + "github.com/conduitio/conduit/pkg/pipeline/stream/mock" "github.com/conduitio/conduit/pkg/record" "github.com/google/uuid" "github.com/matryer/is" @@ -38,16 +39,17 @@ func TestProcessorNode_Success(t *testing.T) { Position: []byte(uuid.NewString()), Metadata: map[string]string{"foo": "bar"}, } - newPosition := []byte(uuid.NewString()) + newMetaKey := "bar2" processor := mock.NewProcessor(ctrl) - processor. - EXPECT(). - Process(ctx, wantRec). 
- DoAndReturn(func(_ context.Context, got record.Record) (record.Record, error) { - got.Position = newPosition - return got, nil + processor.EXPECT().Open(gomock.Any()) + processor.EXPECT(). + Process(ctx, []opencdc.Record{wantRec.ToOpenCDC()}). + DoAndReturn(func(_ context.Context, got []opencdc.Record) []sdk.ProcessedRecord { + got[0].Metadata["foo"] = newMetaKey + return []sdk.ProcessedRecord{sdk.SingleRecord(got[0])} }) + processor.EXPECT().Teardown(gomock.Any()) n := ProcessorNode{ Name: "test", @@ -80,7 +82,7 @@ func TestProcessorNode_Success(t *testing.T) { Ctx: ctx, Record: wantRec, } - wantMsg.Record.Position = newPosition // position was transformed + wantMsg.Record.Metadata["foo"] = newMetaKey // position was transformed is.Equal(wantMsg, got) wg.Wait() // wait for node to stop running @@ -97,7 +99,11 @@ func TestProcessorNode_ErrorWithoutNackHandler(t *testing.T) { wantErr := cerrors.New("something bad happened") processor := mock.NewProcessor(ctrl) - processor.EXPECT().Process(ctx, gomock.Any()).Return(record.Record{}, wantErr) + processor.EXPECT().Open(gomock.Any()) + processor.EXPECT(). + Process(ctx, gomock.Any()). + Return([]sdk.ProcessedRecord{sdk.ErrorRecord{Error: wantErr}}) + processor.EXPECT().Teardown(gomock.Any()) n := ProcessorNode{ Name: "test", @@ -127,11 +133,19 @@ func TestProcessorNode_ErrorWithoutNackHandler(t *testing.T) { func TestProcessorNode_ErrorWithNackHandler(t *testing.T) { is := is.New(t) ctx := context.Background() - ctrl := gomock.NewController(t) wantErr := cerrors.New("something bad happened") - processor := mock.NewProcessor(ctrl) - processor.EXPECT().Process(ctx, gomock.Any()).Return(record.Record{}, wantErr) + processor := mock.NewProcessor(gomock.NewController(t)) + processor.EXPECT().Open(gomock.Any()) + processor.EXPECT(). + Process(ctx, gomock.Any()). 
+ Return([]sdk.ProcessedRecord{sdk.ErrorRecord{Error: wantErr}}) + processor.EXPECT().Teardown(gomock.Any()) + + nackHandler := func(msg *Message, nackMetadata NackMetadata) error { + is.New(t).True(cerrors.Is(nackMetadata.Reason, wantErr)) // expected underlying error to be the processor error + return nil // the error should be regarded as handled + } n := ProcessorNode{ Name: "test", @@ -144,10 +158,7 @@ func TestProcessorNode_ErrorWithNackHandler(t *testing.T) { out := n.Pub() msg := &Message{Ctx: ctx} - msg.RegisterNackHandler(func(msg *Message, nackMetadata NackMetadata) error { - is.True(cerrors.Is(nackMetadata.Reason, wantErr)) // expected underlying error to be the processor error - return nil // the error should be regarded as handled - }) + msg.RegisterNackHandler(nackHandler) go func() { // publisher in <- msg @@ -163,6 +174,84 @@ func TestProcessorNode_ErrorWithNackHandler(t *testing.T) { is.Equal(false, ok) } +func TestProcessorNode_BadProcessor_ReturnsMoreRecords(t *testing.T) { + is := is.New(t) + + processor := mock.NewProcessor(gomock.NewController(t)) + processor.EXPECT().Open(gomock.Any()) + processor.EXPECT(). + Process(context.Background(), gomock.Any()). + // processor returns 2 records instead of one + Return([]sdk.ProcessedRecord{sdk.SingleRecord{}, sdk.SingleRecord{}}) + processor.EXPECT().Teardown(gomock.Any()) + + nackHandler := func(msg *Message, nackMetadata NackMetadata) error { + // expected underlying error to be the processor error + is.New(t).Equal("processor was given 1 record(s), but returned 2", nackMetadata.Reason.Error()) + return nil // the error should be regarded as handled + } + + testNodeWithError(is, processor, nackHandler) +} + +func TestProcessorNode_BadProcessor_ChangesPosition(t *testing.T) { + is := is.New(t) + + processor := mock.NewProcessor(gomock.NewController(t)) + processor.EXPECT().Open(gomock.Any()) + processor.EXPECT(). + Process(context.Background(), gomock.Any()). 
+ // processor returns 2 records instead of one + Return([]sdk.ProcessedRecord{sdk.SingleRecord{Position: opencdc.Position("new position")}}) + processor.EXPECT().Teardown(gomock.Any()) + + nackHandler := func(msg *Message, nackMetadata NackMetadata) error { + // expected underlying error to be the processor error + is.New(t).Equal( + "processor changed position from 'test position' to 'new position' "+ + "(not allowed because source connector cannot correctly acknowledge messages)", + nackMetadata.Reason.Error(), + ) + return nil // the error should be regarded as handled + } + + testNodeWithError(is, processor, nackHandler) +} + +func testNodeWithError(is *is.I, processor *mock.Processor, nackHandler NackHandler) { + ctx := context.Background() + n := ProcessorNode{ + Name: "test", + Processor: processor, + ProcessorTimer: noop.Timer{}, + } + + in := make(chan *Message) + n.Sub(in) + out := n.Pub() + + msg := &Message{ + Ctx: ctx, + Record: record.Record{ + Position: record.Position("test position"), + }, + } + msg.RegisterNackHandler(nackHandler) + go func() { + // publisher + in <- msg + close(in) + }() + + err := n.Run(ctx) + is.True(err != nil) + is.Equal(MessageStatusNacked, msg.Status()) + + // after the node stops the out channel should be closed + _, ok := <-out + is.Equal(false, ok) +} + func TestProcessorNode_Skip(t *testing.T) { is := is.New(t) ctx := context.Background() @@ -170,7 +259,11 @@ func TestProcessorNode_Skip(t *testing.T) { // create a dummy processor proc := mock.NewProcessor(ctrl) - proc.EXPECT().Process(ctx, gomock.Any()).Return(record.Record{}, processor.ErrSkipRecord) + proc.EXPECT().Open(gomock.Any()) + proc.EXPECT(). + Process(ctx, gomock.Any()). 
+ Return([]sdk.ProcessedRecord{sdk.FilterRecord{}}) + proc.EXPECT().Teardown(gomock.Any()) n := ProcessorNode{ Name: "test", diff --git a/pkg/pipeline/stream/source_test.go b/pkg/pipeline/stream/source_test.go index bf0a7c466..c19134107 100644 --- a/pkg/pipeline/stream/source_test.go +++ b/pkg/pipeline/stream/source_test.go @@ -26,7 +26,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/metrics/noop" "github.com/conduitio/conduit/pkg/pipeline/stream/mock" - "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/record" "github.com/google/uuid" "github.com/matryer/is" @@ -97,7 +97,7 @@ func TestSourceNode_Stop_ConcurrentFail(t *testing.T) { src.EXPECT().Read(gomock.Any()).DoAndReturn(func(ctx context.Context) (record.Record, error) { close(startRead) <-unblockRead - return record.Record{}, plugin.ErrStreamNotOpen + return record.Record{}, connectorPlugin.ErrStreamNotOpen }).Times(1) startStop := make(chan struct{}) unblockStop := make(chan struct{}) @@ -124,7 +124,7 @@ func TestSourceNode_Stop_ConcurrentFail(t *testing.T) { go func() { defer close(nodeDone) err := node.Run(ctx) - is.True(cerrors.Is(err, plugin.ErrStreamNotOpen)) + is.True(cerrors.Is(err, connectorPlugin.ErrStreamNotOpen)) }() _, ok, err := cchan.ChanOut[struct{}](startRead).RecvTimeout(ctx, time.Second) @@ -348,7 +348,7 @@ func newMockSource(ctrl *gomock.Controller, recordCount int, wantErr error) (*mo return record.Record{}, wantErr } <-teardown - return record.Record{}, plugin.ErrStreamNotOpen + return record.Record{}, connectorPlugin.ErrStreamNotOpen } r := records[position] position++ diff --git a/pkg/pipeline/stream/stream_test.go b/pkg/pipeline/stream/stream_test.go index 3afb80b48..f820be8d6 100644 --- a/pkg/pipeline/stream/stream_test.go +++ b/pkg/pipeline/stream/stream_test.go @@ -21,15 +21,15 @@ import ( "sync" "time" + 
"github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/foundation/csync" "github.com/conduitio/conduit/pkg/foundation/ctxutil" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/foundation/metrics/noop" "github.com/conduitio/conduit/pkg/pipeline/stream" streammock "github.com/conduitio/conduit/pkg/pipeline/stream/mock" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/processor" - procmock "github.com/conduitio/conduit/pkg/processor/mock" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/record" "github.com/rs/zerolog" "go.uber.org/mock/gomock" @@ -234,6 +234,7 @@ func Example_complexStream() { } // Unordered output: + // DBG opening processor component=ProcessorNode node_id=counter // DBG got record message_id=generator2/1 node_id=printer2 // DBG got record message_id=generator2/1 node_id=printer1 // DBG received ack message_id=generator2/1 node_id=generator2 @@ -301,6 +302,7 @@ func Example_complexStream() { // DBG incoming messages channel closed component=SourceAckerNode node_id=generator1-acker // DBG incoming messages channel closed component=SourceAckerNode node_id=generator2-acker // DBG incoming messages channel closed component=ProcessorNode node_id=counter + // DBG tearing down processor component=ProcessorNode node_id=counter // DBG incoming messages channel closed component=DestinationNode node_id=printer1 // DBG incoming messages channel closed component=DestinationNode node_id=printer2 // DBG incoming messages channel closed component=DestinationAckerNode node_id=printer1-acker @@ -343,7 +345,7 @@ func generatorSource(ctrl *gomock.Controller, logger log.CtxLogger, nodeID strin if position == recordCount { // block until Teardown is called <-teardown - return record.Record{}, plugin.ErrStreamNotOpen + return record.Record{}, connectorPlugin.ErrStreamNotOpen } 
position++ @@ -393,12 +395,22 @@ func printerDestination(ctrl *gomock.Controller, logger log.CtxLogger, nodeID st return destination } -func counterProcessor(ctrl *gomock.Controller, count *int) processor.Interface { - proc := procmock.NewProcessor(ctrl) - proc.EXPECT().Process(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, r record.Record) (record.Record, error) { - *count++ - return r, nil - }).AnyTimes() +func counterProcessor(ctrl *gomock.Controller, count *int) stream.Processor { + proc := streammock.NewProcessor(ctrl) + proc.EXPECT().Open(gomock.Any()) + proc.EXPECT(). + Process(gomock.Any(), gomock.Any()). + DoAndReturn(func(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + *count++ + + out := make([]sdk.ProcessedRecord, len(records)) + for i, r := range records { + out[i] = sdk.SingleRecord(r) + } + + return out + }).AnyTimes() + proc.EXPECT().Teardown(gomock.Any()) return proc } diff --git a/pkg/plugin/builtin/v1/internal/fromplugin/specifier.go b/pkg/plugin/builtin/v1/internal/fromplugin/specifier.go deleted file mode 100644 index b5257a3d3..000000000 --- a/pkg/plugin/builtin/v1/internal/fromplugin/specifier.go +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package fromplugin - -import ( - "github.com/conduitio/conduit-connector-protocol/cpluginv1" - "github.com/conduitio/conduit/pkg/plugin" -) - -func _() { - // An "invalid array index" compiler error signifies that the constant values have changed. - var vTypes [1]struct{} - _ = vTypes[int(cpluginv1.ValidationTypeRequired)-int(plugin.ValidationTypeRequired)] - _ = vTypes[int(cpluginv1.ValidationTypeLessThan)-int(plugin.ValidationTypeLessThan)] - _ = vTypes[int(cpluginv1.ValidationTypeGreaterThan)-int(plugin.ValidationTypeGreaterThan)] - _ = vTypes[int(cpluginv1.ValidationTypeInclusion)-int(plugin.ValidationTypeInclusion)] - _ = vTypes[int(cpluginv1.ValidationTypeExclusion)-int(plugin.ValidationTypeExclusion)] - _ = vTypes[int(cpluginv1.ValidationTypeRegex)-int(plugin.ValidationTypeRegex)] - // parameter types - _ = vTypes[int(cpluginv1.ParameterTypeString)-int(plugin.ParameterTypeString)] - _ = vTypes[int(cpluginv1.ParameterTypeInt)-int(plugin.ParameterTypeInt)] - _ = vTypes[int(cpluginv1.ParameterTypeFloat)-int(plugin.ParameterTypeFloat)] - _ = vTypes[int(cpluginv1.ParameterTypeBool)-int(plugin.ParameterTypeBool)] - _ = vTypes[int(cpluginv1.ParameterTypeFile)-int(plugin.ParameterTypeFile)] - _ = vTypes[int(cpluginv1.ParameterTypeDuration)-int(plugin.ParameterTypeDuration)] -} - -func SpecifierSpecifyResponse(in cpluginv1.SpecifierSpecifyResponse) (plugin.Specification, error) { - specMap := func(params map[string]cpluginv1.SpecifierParameter) map[string]plugin.Parameter { - out := make(map[string]plugin.Parameter) - for k, v := range params { - out[k] = SpecifierParameter(v) - } - return out - } - - return plugin.Specification{ - Name: in.Name, - Summary: in.Summary, - Description: in.Description, - Version: in.Version, - Author: in.Author, - DestinationParams: specMap(in.DestinationParams), - SourceParams: specMap(in.SourceParams), - }, nil -} - -func SpecifierParameter(in cpluginv1.SpecifierParameter) plugin.Parameter { - validations := 
make([]plugin.Validation, len(in.Validations)) - - requiredExists := false - for i, v := range in.Validations { - validations[i] = plugin.Validation{ - Type: plugin.ValidationType(v.Type), - Value: v.Value, - } - if v.Type == cpluginv1.ValidationTypeRequired { - requiredExists = true - } - } - //nolint:staticcheck // needed for backward compatibility, in.Required is - // converted to a validation of type ValidationTypeRequired making sure not - // to duplicate the required validation - if in.Required && !requiredExists { - //nolint:makezero // false positive, we actually want to append here - validations = append(validations, plugin.Validation{ - Type: plugin.ValidationTypeRequired, - }) - } - - return plugin.Parameter{ - Default: in.Default, - Type: cpluginv1ParamTypeToPluginParamType(in.Type), - Description: in.Description, - Validations: validations, - } -} - -func cpluginv1ParamTypeToPluginParamType(t cpluginv1.ParameterType) plugin.ParameterType { - // default type should be string - if t == 0 { - return plugin.ParameterTypeString - } - return plugin.ParameterType(t) -} diff --git a/pkg/plugin/acceptance_testing.go b/pkg/plugin/connector/acceptance_testing.go similarity index 99% rename from pkg/plugin/acceptance_testing.go rename to pkg/plugin/connector/acceptance_testing.go index ac263b463..d62d419ab 100644 --- a/pkg/plugin/acceptance_testing.go +++ b/pkg/plugin/connector/acceptance_testing.go @@ -1,4 +1,4 @@ -// Copyright © 2022 Meroxa, Inc. +// Copyright © 2023 Meroxa, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -13,7 +13,7 @@ // limitations under the License. 
//nolint:dogsled // this is a test file -package plugin +package connector import ( "context" diff --git a/pkg/plugin/builtin/registry.go b/pkg/plugin/connector/builtin/registry.go similarity index 83% rename from pkg/plugin/builtin/registry.go rename to pkg/plugin/connector/builtin/registry.go index b2b404aa4..b0a34667e 100644 --- a/pkg/plugin/builtin/registry.go +++ b/pkg/plugin/connector/builtin/registry.go @@ -29,7 +29,8 @@ import ( "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" - builtinv1 "github.com/conduitio/conduit/pkg/plugin/builtin/v1" + "github.com/conduitio/conduit/pkg/plugin/connector" + builtinv1 "github.com/conduitio/conduit/pkg/plugin/connector/builtin/v1" ) var ( @@ -56,35 +57,35 @@ type Registry struct { type blueprint struct { fullName plugin.FullName - specification plugin.Specification + specification connector.Specification dispenserFactory DispenserFactory } -type DispenserFactory func(name plugin.FullName, logger log.CtxLogger) plugin.Dispenser +type DispenserFactory func(name plugin.FullName, logger log.CtxLogger) connector.Dispenser -func NewDispenserFactory(connector sdk.Connector) DispenserFactory { - if connector.NewSource == nil { - connector.NewSource = func() sdk.Source { return nil } +func NewDispenserFactory(conn sdk.Connector) DispenserFactory { + if conn.NewSource == nil { + conn.NewSource = func() sdk.Source { return nil } } - if connector.NewDestination == nil { - connector.NewDestination = func() sdk.Destination { return nil } + if conn.NewDestination == nil { + conn.NewDestination = func() sdk.Destination { return nil } } - return func(name plugin.FullName, logger log.CtxLogger) plugin.Dispenser { + return func(name plugin.FullName, logger log.CtxLogger) connector.Dispenser { return builtinv1.NewDispenser( name, logger, func() cpluginv1.SpecifierPlugin { - return sdk.NewSpecifierPlugin(connector.NewSpecification(), 
connector.NewSource(), connector.NewDestination()) + return sdk.NewSpecifierPlugin(conn.NewSpecification(), conn.NewSource(), conn.NewDestination()) }, - func() cpluginv1.SourcePlugin { return sdk.NewSourcePlugin(connector.NewSource()) }, - func() cpluginv1.DestinationPlugin { return sdk.NewDestinationPlugin(connector.NewDestination()) }, + func() cpluginv1.SourcePlugin { return sdk.NewSourcePlugin(conn.NewSource()) }, + func() cpluginv1.DestinationPlugin { return sdk.NewDestinationPlugin(conn.NewDestination()) }, ) } } func NewRegistry(logger log.CtxLogger, factories map[string]DispenserFactory) *Registry { - logger = logger.WithComponent("builtin.Registry") + logger = logger.WithComponentFromType(Registry{}) buildInfo, ok := debug.ReadBuildInfo() if !ok { // we are using modules, build info should always be available, we are staying on the safe side @@ -135,15 +136,15 @@ func loadPlugins(buildInfo *debug.BuildInfo, factories map[string]DispenserFacto return plugins } -func getSpecification(moduleName string, factory DispenserFactory, buildInfo *debug.BuildInfo) (plugin.Specification, error) { +func getSpecification(moduleName string, factory DispenserFactory, buildInfo *debug.BuildInfo) (connector.Specification, error) { dispenser := factory("", log.CtxLogger{}) specPlugin, err := dispenser.DispenseSpecifier() if err != nil { - return plugin.Specification{}, cerrors.Errorf("could not dispense specifier for built in plugin: %w", err) + return connector.Specification{}, cerrors.Errorf("could not dispense specifier for built in plugin: %w", err) } specs, err := specPlugin.Specify() if err != nil { - return plugin.Specification{}, cerrors.Errorf("could not get specs for built in plugin: %w", err) + return connector.Specification{}, cerrors.Errorf("could not get specs for built in plugin: %w", err) } if version := getModuleVersion(buildInfo.Deps, moduleName); version != "" { @@ -170,7 +171,7 @@ func newFullName(pluginName, pluginVersion string) plugin.FullName { 
return plugin.NewFullName(plugin.PluginTypeBuiltin, pluginName, pluginVersion) } -func (r *Registry) NewDispenser(logger log.CtxLogger, fullName plugin.FullName) (plugin.Dispenser, error) { +func (r *Registry) NewDispenser(logger log.CtxLogger, fullName plugin.FullName) (connector.Dispenser, error) { versionMap, ok := r.plugins[fullName.PluginName()] if !ok { return nil, plugin.ErrPluginNotFound @@ -187,8 +188,8 @@ func (r *Registry) NewDispenser(logger log.CtxLogger, fullName plugin.FullName) return b.dispenserFactory(fullName, logger), nil } -func (r *Registry) List() map[plugin.FullName]plugin.Specification { - specs := make(map[plugin.FullName]plugin.Specification, len(r.plugins)) +func (r *Registry) List() map[plugin.FullName]connector.Specification { + specs := make(map[plugin.FullName]connector.Specification, len(r.plugins)) for _, versions := range r.plugins { for version, bp := range versions { if version == plugin.PluginVersionLatest { diff --git a/pkg/plugin/builtin/v1/acceptance_test.go b/pkg/plugin/connector/builtin/v1/acceptance_test.go similarity index 86% rename from pkg/plugin/builtin/v1/acceptance_test.go rename to pkg/plugin/connector/builtin/v1/acceptance_test.go index a5149a5a0..29506e78e 100644 --- a/pkg/plugin/builtin/v1/acceptance_test.go +++ b/pkg/plugin/connector/builtin/v1/acceptance_test.go @@ -17,9 +17,9 @@ package builtinv1 import ( "testing" - "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" ) func TestAcceptance(t *testing.T) { - plugin.AcceptanceTestV1(t, newTestDispenser) + connector.AcceptanceTestV1(t, newTestDispenser) } diff --git a/pkg/plugin/builtin/v1/destination.go b/pkg/plugin/connector/builtin/v1/destination.go similarity index 93% rename from pkg/plugin/builtin/v1/destination.go rename to pkg/plugin/connector/builtin/v1/destination.go index e15867796..b5728c509 100644 --- a/pkg/plugin/builtin/v1/destination.go +++ b/pkg/plugin/connector/builtin/v1/destination.go @@ -20,9 
+20,9 @@ import ( "github.com/conduitio/conduit-connector-protocol/cpluginv1" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/builtin/v1/internal/fromplugin" - "github.com/conduitio/conduit/pkg/plugin/builtin/v1/internal/toplugin" + "github.com/conduitio/conduit/pkg/plugin/connector" + "github.com/conduitio/conduit/pkg/plugin/connector/builtin/v1/internal/fromplugin" + "github.com/conduitio/conduit/pkg/plugin/connector/builtin/v1/internal/toplugin" "github.com/conduitio/conduit/pkg/record" "github.com/rs/zerolog" ) @@ -43,7 +43,7 @@ type destinationPluginAdapter struct { stream *stream[cpluginv1.DestinationRunRequest, cpluginv1.DestinationRunResponse] } -var _ plugin.DestinationPlugin = (*destinationPluginAdapter)(nil) +var _ connector.DestinationPlugin = (*destinationPluginAdapter)(nil) func newDestinationPluginAdapter(impl cpluginv1.DestinationPlugin, logger log.CtxLogger) *destinationPluginAdapter { return &destinationPluginAdapter{ @@ -83,7 +83,7 @@ func (s *destinationPluginAdapter) Start(ctx context.Context) error { s.logger.Err(ctx, err).Msg("stream already stopped") } } else { - s.stream.stop(plugin.ErrStreamNotOpen) + s.stream.stop(connector.ErrStreamNotOpen) } s.logger.Trace(ctx).Msg("Run stopped") }() @@ -93,7 +93,7 @@ func (s *destinationPluginAdapter) Start(ctx context.Context) error { func (s *destinationPluginAdapter) Write(ctx context.Context, r record.Record) error { if s.stream == nil { - return plugin.ErrStreamNotOpen + return connector.ErrStreamNotOpen } req, err := toplugin.DestinationRunRequest(r) @@ -112,7 +112,7 @@ func (s *destinationPluginAdapter) Write(ctx context.Context, r record.Record) e func (s *destinationPluginAdapter) Ack(ctx context.Context) (record.Position, error) { if s.stream == nil { - return nil, plugin.ErrStreamNotOpen + return nil, connector.ErrStreamNotOpen } 
s.logger.Trace(ctx).Msg("receiving ack") diff --git a/pkg/plugin/builtin/v1/dispenser.go b/pkg/plugin/connector/builtin/v1/dispenser.go similarity index 87% rename from pkg/plugin/builtin/v1/dispenser.go rename to pkg/plugin/connector/builtin/v1/dispenser.go index ebbed428e..ecbdef3f4 100644 --- a/pkg/plugin/builtin/v1/dispenser.go +++ b/pkg/plugin/connector/builtin/v1/dispenser.go @@ -18,6 +18,7 @@ import ( "github.com/conduitio/conduit-connector-protocol/cpluginv1" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" ) type Dispenser struct { @@ -44,15 +45,15 @@ func NewDispenser( } } -func (d *Dispenser) DispenseSpecifier() (plugin.SpecifierPlugin, error) { +func (d *Dispenser) DispenseSpecifier() (connector.SpecifierPlugin, error) { return newSpecifierPluginAdapter(d.specifierPlugin(), d.pluginLogger("specifier")), nil } -func (d *Dispenser) DispenseSource() (plugin.SourcePlugin, error) { +func (d *Dispenser) DispenseSource() (connector.SourcePlugin, error) { return newSourcePluginAdapter(d.sourcePlugin(), d.pluginLogger("source")), nil } -func (d *Dispenser) DispenseDestination() (plugin.DestinationPlugin, error) { +func (d *Dispenser) DispenseDestination() (connector.DestinationPlugin, error) { return newDestinationPluginAdapter(d.destinationPlugin(), d.pluginLogger("destination")), nil } diff --git a/pkg/plugin/builtin/v1/dispenser_test.go b/pkg/plugin/connector/builtin/v1/dispenser_test.go similarity index 88% rename from pkg/plugin/builtin/v1/dispenser_test.go rename to pkg/plugin/connector/builtin/v1/dispenser_test.go index 9e9503e33..9502dffa4 100644 --- a/pkg/plugin/builtin/v1/dispenser_test.go +++ b/pkg/plugin/connector/builtin/v1/dispenser_test.go @@ -20,12 +20,12 @@ import ( "github.com/conduitio/conduit-connector-protocol/cpluginv1" "github.com/conduitio/conduit-connector-protocol/cpluginv1/mock" "github.com/conduitio/conduit/pkg/foundation/log" - 
"github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/rs/zerolog" "go.uber.org/mock/gomock" ) -func newTestDispenser(t *testing.T) (plugin.Dispenser, *mock.SpecifierPlugin, *mock.SourcePlugin, *mock.DestinationPlugin) { +func newTestDispenser(t *testing.T) (connector.Dispenser, *mock.SpecifierPlugin, *mock.SourcePlugin, *mock.DestinationPlugin) { logger := log.InitLogger(zerolog.InfoLevel, log.FormatCLI) ctrl := gomock.NewController(t) diff --git a/pkg/plugin/builtin/v1/internal/fromplugin/destination.go b/pkg/plugin/connector/builtin/v1/internal/fromplugin/destination.go similarity index 100% rename from pkg/plugin/builtin/v1/internal/fromplugin/destination.go rename to pkg/plugin/connector/builtin/v1/internal/fromplugin/destination.go diff --git a/pkg/plugin/builtin/v1/internal/fromplugin/record.go b/pkg/plugin/connector/builtin/v1/internal/fromplugin/record.go similarity index 100% rename from pkg/plugin/builtin/v1/internal/fromplugin/record.go rename to pkg/plugin/connector/builtin/v1/internal/fromplugin/record.go diff --git a/pkg/plugin/builtin/v1/internal/fromplugin/source.go b/pkg/plugin/connector/builtin/v1/internal/fromplugin/source.go similarity index 100% rename from pkg/plugin/builtin/v1/internal/fromplugin/source.go rename to pkg/plugin/connector/builtin/v1/internal/fromplugin/source.go diff --git a/pkg/plugin/connector/builtin/v1/internal/fromplugin/specifier.go b/pkg/plugin/connector/builtin/v1/internal/fromplugin/specifier.go new file mode 100644 index 000000000..af265c999 --- /dev/null +++ b/pkg/plugin/connector/builtin/v1/internal/fromplugin/specifier.go @@ -0,0 +1,98 @@ +// Copyright © 2022 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package fromplugin + +import ( + "github.com/conduitio/conduit-connector-protocol/cpluginv1" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" +) + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + var vTypes [1]struct{} + _ = vTypes[int(cpluginv1.ValidationTypeRequired)-int(connectorPlugin.ValidationTypeRequired)] + _ = vTypes[int(cpluginv1.ValidationTypeLessThan)-int(connectorPlugin.ValidationTypeLessThan)] + _ = vTypes[int(cpluginv1.ValidationTypeGreaterThan)-int(connectorPlugin.ValidationTypeGreaterThan)] + _ = vTypes[int(cpluginv1.ValidationTypeInclusion)-int(connectorPlugin.ValidationTypeInclusion)] + _ = vTypes[int(cpluginv1.ValidationTypeExclusion)-int(connectorPlugin.ValidationTypeExclusion)] + _ = vTypes[int(cpluginv1.ValidationTypeRegex)-int(connectorPlugin.ValidationTypeRegex)] + // parameter types + _ = vTypes[int(cpluginv1.ParameterTypeString)-int(connectorPlugin.ParameterTypeString)] + _ = vTypes[int(cpluginv1.ParameterTypeInt)-int(connectorPlugin.ParameterTypeInt)] + _ = vTypes[int(cpluginv1.ParameterTypeFloat)-int(connectorPlugin.ParameterTypeFloat)] + _ = vTypes[int(cpluginv1.ParameterTypeBool)-int(connectorPlugin.ParameterTypeBool)] + _ = vTypes[int(cpluginv1.ParameterTypeFile)-int(connectorPlugin.ParameterTypeFile)] + _ = vTypes[int(cpluginv1.ParameterTypeDuration)-int(connectorPlugin.ParameterTypeDuration)] +} + +func SpecifierSpecifyResponse(in cpluginv1.SpecifierSpecifyResponse) (connectorPlugin.Specification, error) { + specMap := func(params 
map[string]cpluginv1.SpecifierParameter) map[string]connectorPlugin.Parameter { + out := make(map[string]connectorPlugin.Parameter) + for k, v := range params { + out[k] = SpecifierParameter(v) + } + return out + } + + return connectorPlugin.Specification{ + Name: in.Name, + Summary: in.Summary, + Description: in.Description, + Version: in.Version, + Author: in.Author, + DestinationParams: specMap(in.DestinationParams), + SourceParams: specMap(in.SourceParams), + }, nil +} + +func SpecifierParameter(in cpluginv1.SpecifierParameter) connectorPlugin.Parameter { + validations := make([]connectorPlugin.Validation, len(in.Validations)) + + requiredExists := false + for i, v := range in.Validations { + validations[i] = connectorPlugin.Validation{ + Type: connectorPlugin.ValidationType(v.Type), + Value: v.Value, + } + if v.Type == cpluginv1.ValidationTypeRequired { + requiredExists = true + } + } + //nolint:staticcheck // needed for backward compatibility + // in.Required is converted to a validation of type ValidationTypeRequired + // making sure not to duplicate the required validation + if in.Required && !requiredExists { + //nolint:makezero // regarding 'append to slice `validations` with non-zero initialized length' + // we don't know upfront if we'll have this additional validation or not + validations = append(validations, connectorPlugin.Validation{ + Type: connectorPlugin.ValidationTypeRequired, + }) + } + + return connectorPlugin.Parameter{ + Default: in.Default, + Type: cpluginv1ParamTypeToPluginParamType(in.Type), + Description: in.Description, + Validations: validations, + } +} + +func cpluginv1ParamTypeToPluginParamType(t cpluginv1.ParameterType) connectorPlugin.ParameterType { + // default type should be string + if t == 0 { + return connectorPlugin.ParameterTypeString + } + return connectorPlugin.ParameterType(t) +} diff --git a/pkg/plugin/builtin/v1/internal/toplugin/destination.go b/pkg/plugin/connector/builtin/v1/internal/toplugin/destination.go 
similarity index 100% rename from pkg/plugin/builtin/v1/internal/toplugin/destination.go rename to pkg/plugin/connector/builtin/v1/internal/toplugin/destination.go diff --git a/pkg/plugin/builtin/v1/internal/toplugin/record.go b/pkg/plugin/connector/builtin/v1/internal/toplugin/record.go similarity index 100% rename from pkg/plugin/builtin/v1/internal/toplugin/record.go rename to pkg/plugin/connector/builtin/v1/internal/toplugin/record.go diff --git a/pkg/plugin/builtin/v1/internal/toplugin/source.go b/pkg/plugin/connector/builtin/v1/internal/toplugin/source.go similarity index 100% rename from pkg/plugin/builtin/v1/internal/toplugin/source.go rename to pkg/plugin/connector/builtin/v1/internal/toplugin/source.go diff --git a/pkg/plugin/builtin/v1/internal/toplugin/specifier.go b/pkg/plugin/connector/builtin/v1/internal/toplugin/specifier.go similarity index 100% rename from pkg/plugin/builtin/v1/internal/toplugin/specifier.go rename to pkg/plugin/connector/builtin/v1/internal/toplugin/specifier.go diff --git a/pkg/plugin/builtin/v1/metadata_test.go b/pkg/plugin/connector/builtin/v1/metadata_test.go similarity index 100% rename from pkg/plugin/builtin/v1/metadata_test.go rename to pkg/plugin/connector/builtin/v1/metadata_test.go diff --git a/pkg/plugin/builtin/v1/sandbox.go b/pkg/plugin/connector/builtin/v1/sandbox.go similarity index 95% rename from pkg/plugin/builtin/v1/sandbox.go rename to pkg/plugin/connector/builtin/v1/sandbox.go index 25c2b9051..8e76c1ef4 100644 --- a/pkg/plugin/builtin/v1/sandbox.go +++ b/pkg/plugin/connector/builtin/v1/sandbox.go @@ -34,7 +34,7 @@ var sandboxChanPool = sync.Pool{ // request and return a response and an error (i.e. plugin calls). 
func runSandbox[REQ any, RES any]( f func(context.Context, REQ) (RES, error), - ctx context.Context, // context is the second parameter on purpose + ctx context.Context, req REQ, logger log.CtxLogger, ) (RES, error) { @@ -98,7 +98,7 @@ func runSandbox[REQ any, RES any]( func runSandboxNoResp[REQ any]( f func(context.Context, REQ) error, - ctx context.Context, // context is the second parameter on purpose + ctx context.Context, req REQ, logger log.CtxLogger, ) error { diff --git a/pkg/plugin/builtin/v1/sandbox_test.go b/pkg/plugin/connector/builtin/v1/sandbox_test.go similarity index 100% rename from pkg/plugin/builtin/v1/sandbox_test.go rename to pkg/plugin/connector/builtin/v1/sandbox_test.go diff --git a/pkg/plugin/builtin/v1/source.go b/pkg/plugin/connector/builtin/v1/source.go similarity index 93% rename from pkg/plugin/builtin/v1/source.go rename to pkg/plugin/connector/builtin/v1/source.go index 71b15b6f3..8c4866750 100644 --- a/pkg/plugin/builtin/v1/source.go +++ b/pkg/plugin/connector/builtin/v1/source.go @@ -20,9 +20,9 @@ import ( "github.com/conduitio/conduit-connector-protocol/cpluginv1" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/builtin/v1/internal/fromplugin" - "github.com/conduitio/conduit/pkg/plugin/builtin/v1/internal/toplugin" + "github.com/conduitio/conduit/pkg/plugin/connector" + "github.com/conduitio/conduit/pkg/plugin/connector/builtin/v1/internal/fromplugin" + "github.com/conduitio/conduit/pkg/plugin/connector/builtin/v1/internal/toplugin" "github.com/conduitio/conduit/pkg/record" "github.com/rs/zerolog" ) @@ -43,7 +43,7 @@ type sourcePluginAdapter struct { stream *stream[cpluginv1.SourceRunRequest, cpluginv1.SourceRunResponse] } -var _ plugin.SourcePlugin = (*sourcePluginAdapter)(nil) +var _ connector.SourcePlugin = (*sourcePluginAdapter)(nil) func newSourcePluginAdapter(impl 
cpluginv1.SourcePlugin, logger log.CtxLogger) *sourcePluginAdapter { return &sourcePluginAdapter{ @@ -86,7 +86,7 @@ func (s *sourcePluginAdapter) Start(ctx context.Context, p record.Position) erro s.logger.Err(ctx, err).Msg("stream already stopped") } } else { - s.stream.stop(plugin.ErrStreamNotOpen) + s.stream.stop(connector.ErrStreamNotOpen) } s.logger.Trace(ctx).Msg("Run stopped") }() @@ -96,7 +96,7 @@ func (s *sourcePluginAdapter) Start(ctx context.Context, p record.Position) erro func (s *sourcePluginAdapter) Read(ctx context.Context) (record.Record, error) { if s.stream == nil { - return record.Record{}, plugin.ErrStreamNotOpen + return record.Record{}, connector.ErrStreamNotOpen } s.logger.Trace(ctx).Msg("receiving record") @@ -115,7 +115,7 @@ func (s *sourcePluginAdapter) Read(ctx context.Context) (record.Record, error) { func (s *sourcePluginAdapter) Ack(ctx context.Context, p record.Position) error { if s.stream == nil { - return plugin.ErrStreamNotOpen + return connector.ErrStreamNotOpen } req := toplugin.SourceRunRequest(p) diff --git a/pkg/plugin/builtin/v1/specifier.go b/pkg/plugin/connector/builtin/v1/specifier.go similarity index 75% rename from pkg/plugin/builtin/v1/specifier.go rename to pkg/plugin/connector/builtin/v1/specifier.go index d21fedcf5..f00c76a10 100644 --- a/pkg/plugin/builtin/v1/specifier.go +++ b/pkg/plugin/connector/builtin/v1/specifier.go @@ -19,9 +19,9 @@ import ( "github.com/conduitio/conduit-connector-protocol/cpluginv1" "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/builtin/v1/internal/fromplugin" - "github.com/conduitio/conduit/pkg/plugin/builtin/v1/internal/toplugin" + "github.com/conduitio/conduit/pkg/plugin/connector" + "github.com/conduitio/conduit/pkg/plugin/connector/builtin/v1/internal/fromplugin" + "github.com/conduitio/conduit/pkg/plugin/connector/builtin/v1/internal/toplugin" ) type specifierPluginAdapter struct { @@ -30,7 
+30,7 @@ type specifierPluginAdapter struct { logger log.CtxLogger } -var _ plugin.SpecifierPlugin = (*specifierPluginAdapter)(nil) +var _ connector.SpecifierPlugin = (*specifierPluginAdapter)(nil) func newSpecifierPluginAdapter(impl cpluginv1.SpecifierPlugin, logger log.CtxLogger) *specifierPluginAdapter { return &specifierPluginAdapter{ @@ -39,15 +39,15 @@ func newSpecifierPluginAdapter(impl cpluginv1.SpecifierPlugin, logger log.CtxLog } } -func (s *specifierPluginAdapter) Specify() (plugin.Specification, error) { +func (s *specifierPluginAdapter) Specify() (connector.Specification, error) { req := toplugin.SpecifierSpecifyRequest() resp, err := runSandbox(s.impl.Specify, context.Background(), req, s.logger) if err != nil { - return plugin.Specification{}, err + return connector.Specification{}, err } out, err := fromplugin.SpecifierSpecifyResponse(resp) if err != nil { - return plugin.Specification{}, err + return connector.Specification{}, err } return out, nil } diff --git a/pkg/plugin/builtin/v1/stream.go b/pkg/plugin/connector/builtin/v1/stream.go similarity index 96% rename from pkg/plugin/builtin/v1/stream.go rename to pkg/plugin/connector/builtin/v1/stream.go index 541792eb7..0175a525e 100644 --- a/pkg/plugin/builtin/v1/stream.go +++ b/pkg/plugin/connector/builtin/v1/stream.go @@ -19,7 +19,7 @@ import ( "io" "sync" - "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" ) // stream mimics the behavior of a gRPC stream using channels. @@ -73,7 +73,7 @@ func (s *stream[REQ, RES]) sendInternal(req REQ) error { case <-s.ctx.Done(): return s.ctx.Err() case <-s.stopChan: - return plugin.ErrStreamNotOpen + return connector.ErrStreamNotOpen case s.reqChan <- req: return nil } diff --git a/pkg/plugin/connector/errors.go b/pkg/plugin/connector/errors.go new file mode 100644 index 000000000..8c35ce942 --- /dev/null +++ b/pkg/plugin/connector/errors.go @@ -0,0 +1,48 @@ +// Copyright © 2023 Meroxa, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package connector + +import ( + "fmt" + + "github.com/conduitio/conduit/pkg/foundation/cerrors" +) + +var ErrStreamNotOpen = cerrors.New("stream not open") + +type ValidationError struct { + Err error +} + +// Error formats the error message. +func (e *ValidationError) Error() string { + if e == nil { + return "" + } + return fmt.Sprintf("validation error: %v", e.Err) +} + +// Unwrap returns the underlying error. +func (e *ValidationError) Unwrap() error { + if e == nil { + return nil + } + return e.Err +} + +func (e *ValidationError) Is(target error) bool { + _, ok := target.(*ValidationError) + return ok +} diff --git a/pkg/plugin/mock/plugin.go b/pkg/plugin/connector/mock/plugin.go similarity index 95% rename from pkg/plugin/mock/plugin.go rename to pkg/plugin/connector/mock/plugin.go index 987c62e06..e71059779 100644 --- a/pkg/plugin/mock/plugin.go +++ b/pkg/plugin/connector/mock/plugin.go @@ -1,5 +1,5 @@ // Code generated by MockGen. DO NOT EDIT. 
-// Source: github.com/conduitio/conduit/pkg/plugin (interfaces: Dispenser,DestinationPlugin,SourcePlugin,SpecifierPlugin) +// Source: github.com/conduitio/conduit/pkg/plugin/connector (interfaces: Dispenser,DestinationPlugin,SourcePlugin,SpecifierPlugin) // // Generated by this command: // @@ -13,7 +13,7 @@ import ( context "context" reflect "reflect" - plugin "github.com/conduitio/conduit/pkg/plugin" + connector "github.com/conduitio/conduit/pkg/plugin/connector" record "github.com/conduitio/conduit/pkg/record" gomock "go.uber.org/mock/gomock" ) @@ -42,10 +42,10 @@ func (m *Dispenser) EXPECT() *DispenserMockRecorder { } // DispenseDestination mocks base method. -func (m *Dispenser) DispenseDestination() (plugin.DestinationPlugin, error) { +func (m *Dispenser) DispenseDestination() (connector.DestinationPlugin, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "DispenseDestination") - ret0, _ := ret[0].(plugin.DestinationPlugin) + ret0, _ := ret[0].(connector.DestinationPlugin) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -57,10 +57,10 @@ func (mr *DispenserMockRecorder) DispenseDestination() *gomock.Call { } // DispenseSource mocks base method. -func (m *Dispenser) DispenseSource() (plugin.SourcePlugin, error) { +func (m *Dispenser) DispenseSource() (connector.SourcePlugin, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "DispenseSource") - ret0, _ := ret[0].(plugin.SourcePlugin) + ret0, _ := ret[0].(connector.SourcePlugin) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -72,10 +72,10 @@ func (mr *DispenserMockRecorder) DispenseSource() *gomock.Call { } // DispenseSpecifier mocks base method. 
-func (m *Dispenser) DispenseSpecifier() (plugin.SpecifierPlugin, error) { +func (m *Dispenser) DispenseSpecifier() (connector.SpecifierPlugin, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "DispenseSpecifier") - ret0, _ := ret[0].(plugin.SpecifierPlugin) + ret0, _ := ret[0].(connector.SpecifierPlugin) ret1, _ := ret[1].(error) return ret0, ret1 } @@ -411,10 +411,10 @@ func (m *SpecifierPlugin) EXPECT() *SpecifierPluginMockRecorder { } // Specify mocks base method. -func (m *SpecifierPlugin) Specify() (plugin.Specification, error) { +func (m *SpecifierPlugin) Specify() (connector.Specification, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Specify") - ret0, _ := ret[0].(plugin.Specification) + ret0, _ := ret[0].(connector.Specification) ret1, _ := ret[1].(error) return ret0, ret1 } diff --git a/pkg/plugin/plugin.go b/pkg/plugin/connector/plugin.go similarity index 84% rename from pkg/plugin/plugin.go rename to pkg/plugin/connector/plugin.go index 578f4980b..9948ebffe 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/connector/plugin.go @@ -1,4 +1,4 @@ -// Copyright © 2022 Meroxa, Inc. +// Copyright © 2023 Meroxa, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -14,13 +14,11 @@ //go:generate mockgen -destination=mock/plugin.go -package=mock -mock_names=Dispenser=Dispenser,SourcePlugin=SourcePlugin,DestinationPlugin=DestinationPlugin,SpecifierPlugin=SpecifierPlugin . 
Dispenser,DestinationPlugin,SourcePlugin,SpecifierPlugin -package plugin +package connector import ( "context" - "strings" - "github.com/Masterminds/semver/v3" "github.com/conduitio/conduit/pkg/record" ) @@ -208,75 +206,3 @@ const ( ParameterTypeFile ParameterTypeDuration ) - -const ( - PluginTypeBuiltin = "builtin" - PluginTypeStandalone = "standalone" - PluginTypeAny = "any" - - PluginVersionLatest = "latest" -) - -type FullName string - -func NewFullName(pluginType, pluginName, pluginVersion string) FullName { - if pluginType != "" { - pluginType += ":" - } - if pluginVersion != "" { - pluginVersion = "@" + pluginVersion - } - return FullName(pluginType + pluginName + pluginVersion) -} - -func (fn FullName) PluginType() string { - tokens := strings.SplitN(string(fn), ":", 2) - if len(tokens) > 1 { - return tokens[0] - } - return PluginTypeAny // default -} - -func (fn FullName) PluginName() string { - name := string(fn) - - tokens := strings.SplitN(name, ":", 2) - if len(tokens) > 1 { - name = tokens[1] - } - - tokens = strings.SplitN(name, "@", 2) - if len(tokens) > 1 { - name = tokens[0] - } - - return name -} - -func (fn FullName) PluginVersion() string { - tokens := strings.SplitN(string(fn), "@", 2) - if len(tokens) > 1 { - return tokens[len(tokens)-1] - } - return PluginVersionLatest // default -} - -func (fn FullName) PluginVersionGreaterThan(other FullName) bool { - leftVersion := fn.PluginVersion() - rightVersion := other.PluginVersion() - - leftSemver, err := semver.NewVersion(leftVersion) - if err != nil { - return false // left is an invalid semver, right is greater either way - } - rightSemver, err := semver.NewVersion(rightVersion) - if err != nil { - return true // left is a valid semver, right is not, left is greater - } - - return leftSemver.GreaterThan(rightSemver) -} - -func (fn FullName) String() string { - return fn.PluginType() + ":" + fn.PluginName() + "@" + fn.PluginVersion() -} diff --git a/pkg/plugin/service.go 
b/pkg/plugin/connector/service.go similarity index 60% rename from pkg/plugin/service.go rename to pkg/plugin/connector/service.go index 1d74556b6..aef6c6386 100644 --- a/pkg/plugin/service.go +++ b/pkg/plugin/connector/service.go @@ -12,13 +12,14 @@ // See the License for the specific language governing permissions and // limitations under the License. -package plugin +package connector import ( "context" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin" ) // registry is an object that can create new plugin dispensers. We need to use @@ -32,41 +33,47 @@ import ( // separate process and communicates with it via gRPC. These plugins are // compiled independently of Conduit and can be included at runtime. type registry interface { - NewDispenser(logger log.CtxLogger, name FullName) (Dispenser, error) - List() map[FullName]Specification + NewDispenser(logger log.CtxLogger, name plugin.FullName) (Dispenser, error) + List() map[plugin.FullName]Specification } -type Service struct { +type PluginService struct { logger log.CtxLogger - builtin registry - standalone registry + builtinReg registry + standaloneReg registry } -func NewService(logger log.CtxLogger, builtin registry, standalone registry) *Service { - return &Service{ - logger: logger.WithComponent("plugin.Service"), - builtin: builtin, - standalone: standalone, +func NewPluginService( + logger log.CtxLogger, + builtin registry, + standalone registry, +) *PluginService { + return &PluginService{ + logger: logger.WithComponent("connector.PluginService"), + builtinReg: builtin, + standaloneReg: standalone, } } -func (s *Service) Check(_ context.Context) error { +func (s *PluginService) Check(context.Context) error { return nil } -func (s *Service) NewDispenser(logger log.CtxLogger, name string) (Dispenser, error) { - fullName := FullName(name) +func (s *PluginService) NewDispenser(logger log.CtxLogger, name string) 
(Dispenser, error) { + logger = logger.WithComponent("plugin") + + fullName := plugin.FullName(name) switch fullName.PluginType() { - case PluginTypeStandalone: - return s.standalone.NewDispenser(logger, fullName) - case PluginTypeBuiltin: - return s.builtin.NewDispenser(logger, fullName) - case PluginTypeAny: - d, err := s.standalone.NewDispenser(logger, fullName) + case plugin.PluginTypeStandalone: + return s.standaloneReg.NewDispenser(logger, fullName) + case plugin.PluginTypeBuiltin: + return s.builtinReg.NewDispenser(logger, fullName) + case plugin.PluginTypeAny: + d, err := s.standaloneReg.NewDispenser(logger, fullName) if err != nil { s.logger.Debug(context.Background()).Err(err).Msg("could not find standalone plugin dispenser, falling back to builtin plugin") - d, err = s.builtin.NewDispenser(logger, fullName) + d, err = s.builtinReg.NewDispenser(logger, fullName) } return d, err default: @@ -74,9 +81,9 @@ func (s *Service) NewDispenser(logger log.CtxLogger, name string) (Dispenser, er } } -func (s *Service) List(context.Context) (map[string]Specification, error) { - builtinSpecs := s.builtin.List() - standaloneSpecs := s.standalone.List() +func (s *PluginService) List(context.Context) (map[string]Specification, error) { + builtinSpecs := s.builtinReg.List() + standaloneSpecs := s.standaloneReg.List() specs := make(map[string]Specification, len(builtinSpecs)+len(standaloneSpecs)) for k, v := range builtinSpecs { @@ -89,7 +96,12 @@ func (s *Service) List(context.Context) (map[string]Specification, error) { return specs, nil } -func (s *Service) ValidateSourceConfig(ctx context.Context, d Dispenser, settings map[string]string) (err error) { +func (s *PluginService) ValidateSourceConfig(ctx context.Context, name string, settings map[string]string) (err error) { + d, err := s.NewDispenser(s.logger, name) + if err != nil { + return cerrors.Errorf("couldn't get dispenser: %w", err) + } + src, err := d.DispenseSource() if err != nil { return cerrors.Errorf("could 
not dispense source: %w", err) @@ -104,13 +116,18 @@ func (s *Service) ValidateSourceConfig(ctx context.Context, d Dispenser, setting err = src.Configure(ctx, settings) if err != nil { - return &ValidationError{err: err} + return &ValidationError{Err: err} } return nil } -func (s *Service) ValidateDestinationConfig(ctx context.Context, d Dispenser, settings map[string]string) (err error) { +func (s *PluginService) ValidateDestinationConfig(ctx context.Context, name string, settings map[string]string) (err error) { + d, err := s.NewDispenser(s.logger, name) + if err != nil { + return cerrors.Errorf("couldn't get dispenser: %w", err) + } + dest, err := d.DispenseDestination() if err != nil { return cerrors.Errorf("could not dispense destination: %w", err) @@ -125,7 +142,7 @@ func (s *Service) ValidateDestinationConfig(ctx context.Context, d Dispenser, se err = dest.Configure(ctx, settings) if err != nil { - return &ValidationError{err: err} + return &ValidationError{Err: err} } return nil diff --git a/pkg/plugin/standalone/registry.go b/pkg/plugin/connector/standalone/registry.go similarity index 68% rename from pkg/plugin/standalone/registry.go rename to pkg/plugin/connector/standalone/registry.go index 2a8be1c39..2eecfd0af 100644 --- a/pkg/plugin/standalone/registry.go +++ b/pkg/plugin/connector/standalone/registry.go @@ -24,7 +24,8 @@ import ( "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" - standalonev1 "github.com/conduitio/conduit/pkg/plugin/standalone/v1" + "github.com/conduitio/conduit/pkg/plugin/connector" + standalonev1 "github.com/conduitio/conduit/pkg/plugin/connector/standalone/v1" "github.com/rs/zerolog" ) @@ -41,7 +42,7 @@ type Registry struct { type blueprint struct { fullName plugin.FullName - specification plugin.Specification + specification connector.Specification path string // TODO store hash of plugin binary and compare before running the binary 
to // ensure someone can't switch the plugin after we registered it @@ -49,14 +50,14 @@ type blueprint struct { func NewRegistry(logger log.CtxLogger, pluginDir string) *Registry { r := &Registry{ - logger: logger.WithComponent("standalone.Registry"), + logger: logger.WithComponentFromType(Registry{}), } if pluginDir != "" { // extract absolute path to make it clearer in the logs what directory is used absPluginDir, err := filepath.Abs(pluginDir) if err != nil { - r.logger.Warn(context.Background()).Err(err).Msg("could not extract absolute plugins path") + r.logger.Warn(context.Background()).Err(err).Msg("could not extract absolute connector plugins path") } else { r.pluginDir = absPluginDir // store plugin dir for hot reloads r.reloadPlugins() @@ -66,15 +67,11 @@ func NewRegistry(logger log.CtxLogger, pluginDir string) *Registry { r.logger.Info(context.Background()). Str(log.PluginPathField, r.pluginDir). Int("count", len(r.List())). - Msg("standalone plugins initialized") + Msg("standalone connector plugins initialized") return r } -func newFullName(pluginName, pluginVersion string) plugin.FullName { - return plugin.NewFullName(plugin.PluginTypeStandalone, pluginName, pluginVersion) -} - func (r *Registry) reloadPlugins() { plugins := r.loadPlugins(context.Background(), r.pluginDir) r.m.Lock() @@ -83,19 +80,19 @@ func (r *Registry) reloadPlugins() { } func (r *Registry) loadPlugins(ctx context.Context, pluginDir string) map[string]map[string]blueprint { - r.logger.Debug(ctx).Msgf("loading plugins from directory %v", pluginDir) + r.logger.Debug(ctx).Msgf("loading connector plugins from directory %v", pluginDir) plugins := make(map[string]map[string]blueprint) dirEntries, err := os.ReadDir(pluginDir) if err != nil { - r.logger.Warn(ctx).Err(err).Msg("could not read plugin directory") + r.logger.Warn(ctx).Err(err).Msg("could not read connector plugin directory") return plugins // return empty map } warn := func(ctx context.Context, err error, pluginPath string) { 
r.logger.Warn(ctx). Err(err). Str(log.PluginPathField, pluginPath). - Msgf("could not load standalone plugin") + Msgf("could not load standalone connector plugin") } for _, dirEntry := range dirEntries { @@ -106,24 +103,8 @@ func (r *Registry) loadPlugins(ctx context.Context, pluginDir string) map[string pluginPath := path.Join(pluginDir, dirEntry.Name()) - // create dispenser without a logger to not spam logs on refresh - dispenser, err := standalonev1.NewDispenser(zerolog.Nop(), pluginPath) - if err != nil { - err = cerrors.Errorf("failed to create dispenser: %w", err) - warn(ctx, err, pluginPath) - continue - } - - specPlugin, err := dispenser.DispenseSpecifier() - if err != nil { - err = cerrors.Errorf("failed to dispense specifier (tip: check if the file is a valid plugin binary and if you have permissions for running it): %w", err) - warn(ctx, err, pluginPath) - continue - } - - specs, err := specPlugin.Specify() + specs, err := r.loadSpecifications(pluginPath) if err != nil { - err = cerrors.Errorf("failed to get specs: %w", err) warn(ctx, err, pluginPath) continue } @@ -134,9 +115,9 @@ func (r *Registry) loadPlugins(ctx context.Context, pluginDir string) map[string plugins[specs.Name] = versionMap } - fullName := newFullName(specs.Name, specs.Version) + fullName := plugin.NewFullName(plugin.PluginTypeStandalone, specs.Name, specs.Version) if conflict, ok := versionMap[specs.Version]; ok { - err = cerrors.Errorf("conflict detected, plugin %v already registered, please remove either %v or %v, these plugins won't be usable until that happens", fullName, conflict.path, pluginPath) + err = cerrors.Errorf("conflict detected, connector plugin %v already registered, please remove either %v or %v, these plugins won't be usable until that happens", fullName, conflict.path, pluginPath) warn(ctx, err, pluginPath) // delete plugin from map at the end so that further duplicates can // still be found @@ -162,19 +143,39 @@ func (r *Registry) loadPlugins(ctx 
context.Context, pluginDir string) map[string r.logger.Debug(ctx). Str(log.PluginPathField, pluginPath). Str(log.PluginNameField, string(bp.fullName)). - Msg("set plugin as latest") + Msg("set connector plugin as latest") } r.logger.Debug(ctx). Str(log.PluginPathField, pluginPath). Str(log.PluginNameField, string(bp.fullName)). - Msg("loaded standalone plugin") + Msg("loaded standalone connector plugin") } return plugins } -func (r *Registry) NewDispenser(logger log.CtxLogger, fullName plugin.FullName) (plugin.Dispenser, error) { +func (r *Registry) loadSpecifications(pluginPath string) (connector.Specification, error) { + // create dispenser without a logger to not spam logs on refresh + dispenser, err := standalonev1.NewDispenser(zerolog.Nop(), pluginPath) + if err != nil { + return connector.Specification{}, cerrors.Errorf("failed to create connector dispenser: %w", err) + } + + specPlugin, err := dispenser.DispenseSpecifier() + if err != nil { + return connector.Specification{}, cerrors.Errorf("failed to dispense connector specifier (tip: check if the file is a valid connector plugin binary and if you have permissions for running it): %w", err) + } + + specs, err := specPlugin.Specify() + if err != nil { + return connector.Specification{}, cerrors.Errorf("failed to get connector specs: %w", err) + } + + return specs, nil +} + +func (r *Registry) NewDispenser(logger log.CtxLogger, fullName plugin.FullName) (connector.Dispenser, error) { r.m.RLock() defer r.m.RUnlock() @@ -188,18 +189,18 @@ func (r *Registry) NewDispenser(logger log.CtxLogger, fullName plugin.FullName) for k := range versionMap { availableVersions = append(availableVersions, k) } - return nil, cerrors.Errorf("could not find standalone plugin, only found versions %v: %w", availableVersions, plugin.ErrPluginNotFound) + return nil, cerrors.Errorf("could not find standalone connector plugin, only found versions %v: %w", availableVersions, plugin.ErrPluginNotFound) } logger = 
logger.WithComponent("plugin.standalone") return standalonev1.NewDispenser(logger.ZerologWithComponent(), bp.path) } -func (r *Registry) List() map[plugin.FullName]plugin.Specification { +func (r *Registry) List() map[plugin.FullName]connector.Specification { r.m.RLock() defer r.m.RUnlock() - specs := make(map[plugin.FullName]plugin.Specification, len(r.plugins)) + specs := make(map[plugin.FullName]connector.Specification, len(r.plugins)) for _, versions := range r.plugins { for version, bp := range versions { if version == plugin.PluginVersionLatest { diff --git a/pkg/plugin/standalone/registry_test.go b/pkg/plugin/connector/standalone/registry_test.go similarity index 76% rename from pkg/plugin/standalone/registry_test.go rename to pkg/plugin/connector/standalone/registry_test.go index 9e91ef2f8..8be043811 100644 --- a/pkg/plugin/standalone/registry_test.go +++ b/pkg/plugin/connector/standalone/registry_test.go @@ -22,10 +22,11 @@ import ( "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/matryer/is" ) -// These constants need to match the constants in pkg/plugin/standalone/test/testplugin/main.go +// These constants need to match the constants in pkg/plugin/connector/standalone/test/testplugin/main.go const ( testPluginDir = "./test" @@ -54,68 +55,68 @@ func testPluginBlueprint() blueprint { return blueprint{ fullName: plugin.FullName(fmt.Sprintf("standalone:%v@%v", testPluginName, testPluginVersion)), path: path.Join(testPluginDir, "testplugin.sh"), - specification: plugin.Specification{ + specification: connector.Specification{ Name: testPluginName, Summary: testPluginSummary, Description: testPluginDescription, Version: testPluginVersion, Author: testPluginAuthor, - SourceParams: map[string]plugin.Parameter{ + SourceParams: map[string]connector.Parameter{ testPluginSourceParam1: { Default: testPluginSourceParam1Default, - Type: 
plugin.ParameterTypeString, // default type + Type: connector.ParameterTypeString, // default type Description: testPluginSourceParam1Description, - Validations: []plugin.Validation{ + Validations: []connector.Validation{ { - Type: plugin.ValidationTypeRequired, + Type: connector.ValidationTypeRequired, Value: "", }, { - Type: plugin.ValidationTypeInclusion, + Type: connector.ValidationTypeInclusion, Value: "one,two", }, }, }, testPluginSourceParam2: { Default: testPluginSourceParam2Default, - Type: plugin.ParameterTypeInt, + Type: connector.ParameterTypeInt, Description: testPluginSourceParam2Description, - Validations: []plugin.Validation{ + Validations: []connector.Validation{ { - Type: plugin.ValidationTypeExclusion, + Type: connector.ValidationTypeExclusion, Value: "3,4", }, { - Type: plugin.ValidationTypeGreaterThan, + Type: connector.ValidationTypeGreaterThan, Value: "1", }, }, }, }, - DestinationParams: map[string]plugin.Parameter{ + DestinationParams: map[string]connector.Parameter{ testPluginDestinationParam1: { Default: testPluginDestinationParam1Default, - Type: plugin.ParameterTypeInt, + Type: connector.ParameterTypeInt, Description: testPluginDestinationParam1Description, - Validations: []plugin.Validation{ + Validations: []connector.Validation{ { - Type: plugin.ValidationTypeLessThan, + Type: connector.ValidationTypeLessThan, Value: "10", }, { - Type: plugin.ValidationTypeRegex, + Type: connector.ValidationTypeRegex, Value: "[1-9]", }, { - Type: plugin.ValidationTypeRequired, + Type: connector.ValidationTypeRequired, }, }, }, testPluginDestinationParam2: { Default: testPluginDestinationParam2Default, - Type: plugin.ParameterTypeDuration, + Type: connector.ParameterTypeDuration, Description: testPluginDestinationParam2Description, - Validations: []plugin.Validation{}, + Validations: []connector.Validation{}, }, }, }, @@ -145,7 +146,7 @@ func TestRegistry_List(t *testing.T) { got := r.List() bp := testPluginBlueprint() - want := 
map[plugin.FullName]plugin.Specification{ + want := map[plugin.FullName]connector.Specification{ bp.fullName: bp.specification, } is.Equal(got, want) diff --git a/pkg/plugin/standalone/test/testplugin.sh b/pkg/plugin/connector/standalone/test/testplugin.sh similarity index 100% rename from pkg/plugin/standalone/test/testplugin.sh rename to pkg/plugin/connector/standalone/test/testplugin.sh diff --git a/pkg/plugin/standalone/test/testplugin/main.go b/pkg/plugin/connector/standalone/test/testplugin/main.go similarity index 97% rename from pkg/plugin/standalone/test/testplugin/main.go rename to pkg/plugin/connector/standalone/test/testplugin/main.go index 2024fd4d4..91955e88c 100644 --- a/pkg/plugin/standalone/test/testplugin/main.go +++ b/pkg/plugin/connector/standalone/test/testplugin/main.go @@ -23,7 +23,7 @@ import ( "github.com/conduitio/conduit-connector-protocol/cpluginv1/server" ) -// These constants need to match the constants in pkg/plugin/standalone/registry_test.go +// These constants need to match the constants in pkg/plugin/connector/standalone/registry_test.go const ( testPluginName = "test-plugin" testPluginSummary = "My test plugin summary" diff --git a/pkg/plugin/standalone/v1/acceptance_test.go b/pkg/plugin/connector/standalone/v1/acceptance_test.go similarity index 81% rename from pkg/plugin/standalone/v1/acceptance_test.go rename to pkg/plugin/connector/standalone/v1/acceptance_test.go index 8286df925..7514c7b55 100644 --- a/pkg/plugin/standalone/v1/acceptance_test.go +++ b/pkg/plugin/connector/standalone/v1/acceptance_test.go @@ -18,13 +18,13 @@ import ( "testing" "github.com/conduitio/conduit-connector-protocol/cpluginv1/mock" - "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/rs/zerolog" ) func TestAcceptance(t *testing.T) { logger := zerolog.Nop() - plugin.AcceptanceTestV1(t, func(t *testing.T) (plugin.Dispenser, *mock.SpecifierPlugin, *mock.SourcePlugin, *mock.DestinationPlugin) { + 
connector.AcceptanceTestV1(t, func(t *testing.T) (connector.Dispenser, *mock.SpecifierPlugin, *mock.SourcePlugin, *mock.DestinationPlugin) { return newTestDispenser(t, logger) }) } diff --git a/pkg/plugin/standalone/v1/client.go b/pkg/plugin/connector/standalone/v1/client.go similarity index 100% rename from pkg/plugin/standalone/v1/client.go rename to pkg/plugin/connector/standalone/v1/client.go diff --git a/pkg/plugin/standalone/v1/destination.go b/pkg/plugin/connector/standalone/v1/destination.go similarity index 90% rename from pkg/plugin/standalone/v1/destination.go rename to pkg/plugin/connector/standalone/v1/destination.go index 091b19d16..4cd7ef19b 100644 --- a/pkg/plugin/standalone/v1/destination.go +++ b/pkg/plugin/connector/standalone/v1/destination.go @@ -20,9 +20,9 @@ import ( connectorv1 "github.com/conduitio/conduit-connector-protocol/proto/connector/v1" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/standalone/v1/internal/fromproto" - "github.com/conduitio/conduit/pkg/plugin/standalone/v1/internal/toproto" + "github.com/conduitio/conduit/pkg/plugin/connector" + "github.com/conduitio/conduit/pkg/plugin/connector/standalone/v1/internal/fromproto" + "github.com/conduitio/conduit/pkg/plugin/connector/standalone/v1/internal/toproto" "github.com/conduitio/conduit/pkg/record" goplugin "github.com/hashicorp/go-plugin" "google.golang.org/grpc" @@ -49,7 +49,7 @@ type destinationPluginClient struct { stream connectorv1.DestinationPlugin_RunClient } -var _ plugin.DestinationPlugin = (*destinationPluginClient)(nil) +var _ connector.DestinationPlugin = (*destinationPluginClient)(nil) func (s *destinationPluginClient) Configure(ctx context.Context, cfg map[string]string) error { protoReq := toproto.DestinationConfigureRequest(cfg) @@ -77,7 +77,7 @@ func (s *destinationPluginClient) Start(ctx context.Context) error { func (s *destinationPluginClient) Write(_ 
context.Context, r record.Record) error { if s.stream == nil { - return plugin.ErrStreamNotOpen + return connector.ErrStreamNotOpen } protoReq, err := toproto.DestinationRunRequest(r) @@ -89,7 +89,7 @@ func (s *destinationPluginClient) Write(_ context.Context, r record.Record) erro if err != nil { if err == io.EOF { // stream was gracefully closed - return plugin.ErrStreamNotOpen + return connector.ErrStreamNotOpen } return unwrapGRPCError(err) } @@ -98,13 +98,13 @@ func (s *destinationPluginClient) Write(_ context.Context, r record.Record) erro func (s *destinationPluginClient) Ack(_ context.Context) (record.Position, error) { if s.stream == nil { - return nil, plugin.ErrStreamNotOpen + return nil, connector.ErrStreamNotOpen } resp, err := s.stream.Recv() if err != nil { if err == io.EOF { - return nil, plugin.ErrStreamNotOpen + return nil, connector.ErrStreamNotOpen } return nil, unwrapGRPCError(err) } diff --git a/pkg/plugin/standalone/v1/dispenser.go b/pkg/plugin/connector/standalone/v1/dispenser.go similarity index 85% rename from pkg/plugin/standalone/v1/dispenser.go rename to pkg/plugin/connector/standalone/v1/dispenser.go index 7c3f9524d..cd26f0f3f 100644 --- a/pkg/plugin/standalone/v1/dispenser.go +++ b/pkg/plugin/connector/standalone/v1/dispenser.go @@ -19,7 +19,7 @@ import ( "sync" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" goplugin "github.com/hashicorp/go-plugin" "github.com/rs/zerolog" ) @@ -91,7 +91,7 @@ func (d *Dispenser) teardown() { d.dispensed = false } -func (d *Dispenser) DispenseSpecifier() (plugin.SpecifierPlugin, error) { +func (d *Dispenser) DispenseSpecifier() (connector.SpecifierPlugin, error) { err := d.dispense() if err != nil { return nil, err @@ -106,7 +106,7 @@ func (d *Dispenser) DispenseSpecifier() (plugin.SpecifierPlugin, error) { return nil, err } - specifier, ok := raw.(plugin.SpecifierPlugin) + specifier, ok := 
raw.(connector.SpecifierPlugin) if !ok { return nil, cerrors.Errorf("plugin did not dispense specifier, got type: %T", raw) } @@ -114,7 +114,7 @@ func (d *Dispenser) DispenseSpecifier() (plugin.SpecifierPlugin, error) { return specifierPluginDispenserSignaller{specifier, d}, nil } -func (d *Dispenser) DispenseSource() (plugin.SourcePlugin, error) { +func (d *Dispenser) DispenseSource() (connector.SourcePlugin, error) { err := d.dispense() if err != nil { return nil, err @@ -129,7 +129,7 @@ func (d *Dispenser) DispenseSource() (plugin.SourcePlugin, error) { return nil, err } - source, ok := raw.(plugin.SourcePlugin) + source, ok := raw.(connector.SourcePlugin) if !ok { return nil, cerrors.Errorf("plugin did not dispense source, got type: %T", raw) } @@ -137,7 +137,7 @@ func (d *Dispenser) DispenseSource() (plugin.SourcePlugin, error) { return sourcePluginDispenserSignaller{source, d}, nil } -func (d *Dispenser) DispenseDestination() (plugin.DestinationPlugin, error) { +func (d *Dispenser) DispenseDestination() (connector.DestinationPlugin, error) { err := d.dispense() if err != nil { return nil, err @@ -152,7 +152,7 @@ func (d *Dispenser) DispenseDestination() (plugin.DestinationPlugin, error) { return nil, err } - destination, ok := raw.(plugin.DestinationPlugin) + destination, ok := raw.(connector.DestinationPlugin) if !ok { return nil, cerrors.Errorf("plugin did not dispense destination, got type: %T", raw) } @@ -161,17 +161,17 @@ func (d *Dispenser) DispenseDestination() (plugin.DestinationPlugin, error) { } type specifierPluginDispenserSignaller struct { - plugin.SpecifierPlugin + connector.SpecifierPlugin d *Dispenser } -func (s specifierPluginDispenserSignaller) Specify() (plugin.Specification, error) { +func (s specifierPluginDispenserSignaller) Specify() (connector.Specification, error) { defer s.d.teardown() return s.SpecifierPlugin.Specify() } type sourcePluginDispenserSignaller struct { - plugin.SourcePlugin + connector.SourcePlugin d *Dispenser } @@ 
-181,7 +181,7 @@ func (s sourcePluginDispenserSignaller) Teardown(ctx context.Context) error { } type destinationPluginDispenserSignaller struct { - plugin.DestinationPlugin + connector.DestinationPlugin d *Dispenser } diff --git a/pkg/plugin/standalone/v1/dispenser_test.go b/pkg/plugin/connector/standalone/v1/dispenser_test.go similarity index 96% rename from pkg/plugin/standalone/v1/dispenser_test.go rename to pkg/plugin/connector/standalone/v1/dispenser_test.go index 32f90fc74..4ee30d046 100644 --- a/pkg/plugin/standalone/v1/dispenser_test.go +++ b/pkg/plugin/connector/standalone/v1/dispenser_test.go @@ -22,14 +22,14 @@ import ( "github.com/conduitio/conduit-connector-protocol/cpluginv1" "github.com/conduitio/conduit-connector-protocol/cpluginv1/mock" "github.com/conduitio/conduit-connector-protocol/cpluginv1/server" - "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" goplugin "github.com/hashicorp/go-plugin" "github.com/rs/zerolog" "go.uber.org/mock/gomock" ) func newTestDispenser(t *testing.T, logger zerolog.Logger) ( - plugin.Dispenser, + connector.Dispenser, *mock.SpecifierPlugin, *mock.SourcePlugin, *mock.DestinationPlugin, diff --git a/pkg/plugin/standalone/v1/internal/fromproto/destination.go b/pkg/plugin/connector/standalone/v1/internal/fromproto/destination.go similarity index 100% rename from pkg/plugin/standalone/v1/internal/fromproto/destination.go rename to pkg/plugin/connector/standalone/v1/internal/fromproto/destination.go diff --git a/pkg/plugin/standalone/v1/internal/fromproto/record.go b/pkg/plugin/connector/standalone/v1/internal/fromproto/record.go similarity index 100% rename from pkg/plugin/standalone/v1/internal/fromproto/record.go rename to pkg/plugin/connector/standalone/v1/internal/fromproto/record.go diff --git a/pkg/plugin/standalone/v1/internal/fromproto/source.go b/pkg/plugin/connector/standalone/v1/internal/fromproto/source.go similarity index 100% rename from 
pkg/plugin/standalone/v1/internal/fromproto/source.go rename to pkg/plugin/connector/standalone/v1/internal/fromproto/source.go diff --git a/pkg/plugin/standalone/v1/internal/fromproto/specifier.go b/pkg/plugin/connector/standalone/v1/internal/fromproto/specifier.go similarity index 52% rename from pkg/plugin/standalone/v1/internal/fromproto/specifier.go rename to pkg/plugin/connector/standalone/v1/internal/fromproto/specifier.go index 71f3670a5..c3af4b737 100644 --- a/pkg/plugin/standalone/v1/internal/fromproto/specifier.go +++ b/pkg/plugin/connector/standalone/v1/internal/fromproto/specifier.go @@ -18,30 +18,30 @@ import ( "fmt" connectorv1 "github.com/conduitio/conduit-connector-protocol/proto/connector/v1" - "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/connector" ) func _() { // An "invalid array index" compiler error signifies that the constant values have changed. var vTypes [1]struct{} - _ = vTypes[int(plugin.ValidationTypeRequired)-int(connectorv1.Specifier_Parameter_Validation_TYPE_REQUIRED)] - _ = vTypes[int(plugin.ValidationTypeLessThan)-int(connectorv1.Specifier_Parameter_Validation_TYPE_LESS_THAN)] - _ = vTypes[int(plugin.ValidationTypeGreaterThan)-int(connectorv1.Specifier_Parameter_Validation_TYPE_GREATER_THAN)] - _ = vTypes[int(plugin.ValidationTypeInclusion)-int(connectorv1.Specifier_Parameter_Validation_TYPE_INCLUSION)] - _ = vTypes[int(plugin.ValidationTypeExclusion)-int(connectorv1.Specifier_Parameter_Validation_TYPE_EXCLUSION)] - _ = vTypes[int(plugin.ValidationTypeRegex)-int(connectorv1.Specifier_Parameter_Validation_TYPE_REGEX)] + _ = vTypes[int(connector.ValidationTypeRequired)-int(connectorv1.Specifier_Parameter_Validation_TYPE_REQUIRED)] + _ = vTypes[int(connector.ValidationTypeLessThan)-int(connectorv1.Specifier_Parameter_Validation_TYPE_LESS_THAN)] + _ = vTypes[int(connector.ValidationTypeGreaterThan)-int(connectorv1.Specifier_Parameter_Validation_TYPE_GREATER_THAN)] + _ = 
vTypes[int(connector.ValidationTypeInclusion)-int(connectorv1.Specifier_Parameter_Validation_TYPE_INCLUSION)] + _ = vTypes[int(connector.ValidationTypeExclusion)-int(connectorv1.Specifier_Parameter_Validation_TYPE_EXCLUSION)] + _ = vTypes[int(connector.ValidationTypeRegex)-int(connectorv1.Specifier_Parameter_Validation_TYPE_REGEX)] // parameter types - _ = vTypes[int(plugin.ParameterTypeString)-int(connectorv1.Specifier_Parameter_TYPE_STRING)] - _ = vTypes[int(plugin.ParameterTypeInt)-int(connectorv1.Specifier_Parameter_TYPE_INT)] - _ = vTypes[int(plugin.ParameterTypeFloat)-int(connectorv1.Specifier_Parameter_TYPE_FLOAT)] - _ = vTypes[int(plugin.ParameterTypeBool)-int(connectorv1.Specifier_Parameter_TYPE_BOOL)] - _ = vTypes[int(plugin.ParameterTypeFile)-int(connectorv1.Specifier_Parameter_TYPE_FILE)] - _ = vTypes[int(plugin.ParameterTypeDuration)-int(connectorv1.Specifier_Parameter_TYPE_DURATION)] + _ = vTypes[int(connector.ParameterTypeString)-int(connectorv1.Specifier_Parameter_TYPE_STRING)] + _ = vTypes[int(connector.ParameterTypeInt)-int(connectorv1.Specifier_Parameter_TYPE_INT)] + _ = vTypes[int(connector.ParameterTypeFloat)-int(connectorv1.Specifier_Parameter_TYPE_FLOAT)] + _ = vTypes[int(connector.ParameterTypeBool)-int(connectorv1.Specifier_Parameter_TYPE_BOOL)] + _ = vTypes[int(connector.ParameterTypeFile)-int(connectorv1.Specifier_Parameter_TYPE_FILE)] + _ = vTypes[int(connector.ParameterTypeDuration)-int(connectorv1.Specifier_Parameter_TYPE_DURATION)] } -func SpecifierSpecifyResponse(in *connectorv1.Specifier_Specify_Response) (plugin.Specification, error) { - specMap := func(in map[string]*connectorv1.Specifier_Parameter) (map[string]plugin.Parameter, error) { - out := make(map[string]plugin.Parameter, len(in)) +func SpecifierSpecifyResponse(in *connectorv1.Specifier_Specify_Response) (connector.Specification, error) { + specMap := func(in map[string]*connectorv1.Specifier_Parameter) (map[string]connector.Parameter, error) { + out := 
make(map[string]connector.Parameter, len(in)) var err error for k, v := range in { out[k], err = SpecifierParameter(v) @@ -54,15 +54,15 @@ func SpecifierSpecifyResponse(in *connectorv1.Specifier_Specify_Response) (plugi sourceParams, err := specMap(in.SourceParams) if err != nil { - return plugin.Specification{}, fmt.Errorf("error converting SourceSpec: %w", err) + return connector.Specification{}, fmt.Errorf("error converting SourceSpec: %w", err) } destinationParams, err := specMap(in.DestinationParams) if err != nil { - return plugin.Specification{}, fmt.Errorf("error converting DestinationSpec: %w", err) + return connector.Specification{}, fmt.Errorf("error converting DestinationSpec: %w", err) } - out := plugin.Specification{ + out := connector.Specification{ Name: in.Name, Summary: in.Summary, Description: in.Description, @@ -74,13 +74,13 @@ func SpecifierSpecifyResponse(in *connectorv1.Specifier_Specify_Response) (plugi return out, nil } -func SpecifierParameter(in *connectorv1.Specifier_Parameter) (plugin.Parameter, error) { - validations := make([]plugin.Validation, len(in.Validations)) +func SpecifierParameter(in *connectorv1.Specifier_Parameter) (connector.Parameter, error) { + validations := make([]connector.Validation, len(in.Validations)) requiredExists := false for i, v := range in.Validations { - validations[i] = plugin.Validation{ - Type: plugin.ValidationType(v.Type), + validations[i] = connector.Validation{ + Type: connector.ValidationType(v.Type), Value: v.Value, } if v.Type == connectorv1.Specifier_Parameter_Validation_TYPE_REQUIRED { @@ -90,12 +90,12 @@ func SpecifierParameter(in *connectorv1.Specifier_Parameter) (plugin.Parameter, // needed for backward compatibility, in.Required is converted to a validation of type ValidationTypeRequired // making sure not to duplicate the required validation if in.Required && !requiredExists { //nolint: staticcheck // required is still supported for now - validations = append(validations, plugin.Validation{ 
//nolint: makezero // list is full so need to append - Type: plugin.ValidationTypeRequired, + validations = append(validations, connector.Validation{ //nolint: makezero // list is full so need to append + Type: connector.ValidationTypeRequired, }) } - out := plugin.Parameter{ + out := connector.Parameter{ Default: in.Default, Description: in.Description, Type: connectorv1ParamTypeToPluginParamType(in.Type), @@ -104,10 +104,10 @@ func SpecifierParameter(in *connectorv1.Specifier_Parameter) (plugin.Parameter, return out, nil } -func connectorv1ParamTypeToPluginParamType(t connectorv1.Specifier_Parameter_Type) plugin.ParameterType { +func connectorv1ParamTypeToPluginParamType(t connectorv1.Specifier_Parameter_Type) connector.ParameterType { // default type should be string if t == connectorv1.Specifier_Parameter_TYPE_UNSPECIFIED { - return plugin.ParameterTypeString + return connector.ParameterTypeString } - return plugin.ParameterType(t) + return connector.ParameterType(t) } diff --git a/pkg/plugin/standalone/v1/internal/toproto/destination.go b/pkg/plugin/connector/standalone/v1/internal/toproto/destination.go similarity index 100% rename from pkg/plugin/standalone/v1/internal/toproto/destination.go rename to pkg/plugin/connector/standalone/v1/internal/toproto/destination.go diff --git a/pkg/plugin/standalone/v1/internal/toproto/record.go b/pkg/plugin/connector/standalone/v1/internal/toproto/record.go similarity index 100% rename from pkg/plugin/standalone/v1/internal/toproto/record.go rename to pkg/plugin/connector/standalone/v1/internal/toproto/record.go diff --git a/pkg/plugin/standalone/v1/internal/toproto/source.go b/pkg/plugin/connector/standalone/v1/internal/toproto/source.go similarity index 100% rename from pkg/plugin/standalone/v1/internal/toproto/source.go rename to pkg/plugin/connector/standalone/v1/internal/toproto/source.go diff --git a/pkg/plugin/standalone/v1/internal/toproto/specifier.go 
b/pkg/plugin/connector/standalone/v1/internal/toproto/specifier.go similarity index 100% rename from pkg/plugin/standalone/v1/internal/toproto/specifier.go rename to pkg/plugin/connector/standalone/v1/internal/toproto/specifier.go diff --git a/pkg/plugin/standalone/v1/logger.go b/pkg/plugin/connector/standalone/v1/logger.go similarity index 100% rename from pkg/plugin/standalone/v1/logger.go rename to pkg/plugin/connector/standalone/v1/logger.go diff --git a/pkg/plugin/standalone/v1/logger_test.go b/pkg/plugin/connector/standalone/v1/logger_test.go similarity index 100% rename from pkg/plugin/standalone/v1/logger_test.go rename to pkg/plugin/connector/standalone/v1/logger_test.go diff --git a/pkg/plugin/standalone/v1/metadata_test.go b/pkg/plugin/connector/standalone/v1/metadata_test.go similarity index 100% rename from pkg/plugin/standalone/v1/metadata_test.go rename to pkg/plugin/connector/standalone/v1/metadata_test.go diff --git a/pkg/plugin/standalone/v1/source.go b/pkg/plugin/connector/standalone/v1/source.go similarity index 90% rename from pkg/plugin/standalone/v1/source.go rename to pkg/plugin/connector/standalone/v1/source.go index 0a6f2d92a..b744055a6 100644 --- a/pkg/plugin/standalone/v1/source.go +++ b/pkg/plugin/connector/standalone/v1/source.go @@ -20,9 +20,9 @@ import ( connectorv1 "github.com/conduitio/conduit-connector-protocol/proto/connector/v1" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/standalone/v1/internal/fromproto" - "github.com/conduitio/conduit/pkg/plugin/standalone/v1/internal/toproto" + "github.com/conduitio/conduit/pkg/plugin/connector" + "github.com/conduitio/conduit/pkg/plugin/connector/standalone/v1/internal/fromproto" + "github.com/conduitio/conduit/pkg/plugin/connector/standalone/v1/internal/toproto" "github.com/conduitio/conduit/pkg/record" goplugin "github.com/hashicorp/go-plugin" "google.golang.org/grpc" @@ -49,7 +49,7 @@ type 
sourcePluginClient struct { stream connectorv1.SourcePlugin_RunClient } -var _ plugin.SourcePlugin = (*sourcePluginClient)(nil) +var _ connector.SourcePlugin = (*sourcePluginClient)(nil) func (s *sourcePluginClient) Configure(ctx context.Context, cfg map[string]string) error { protoReq := toproto.SourceConfigureRequest(cfg) @@ -77,13 +77,13 @@ func (s *sourcePluginClient) Start(ctx context.Context, p record.Position) error func (s *sourcePluginClient) Read(context.Context) (record.Record, error) { if s.stream == nil { - return record.Record{}, plugin.ErrStreamNotOpen + return record.Record{}, connector.ErrStreamNotOpen } protoResp, err := s.stream.Recv() if err != nil { if err == io.EOF { - return record.Record{}, plugin.ErrStreamNotOpen + return record.Record{}, connector.ErrStreamNotOpen } return record.Record{}, unwrapGRPCError(err) } @@ -96,7 +96,7 @@ func (s *sourcePluginClient) Read(context.Context) (record.Record, error) { func (s *sourcePluginClient) Ack(_ context.Context, p record.Position) error { if s.stream == nil { - return plugin.ErrStreamNotOpen + return connector.ErrStreamNotOpen } protoReq := toproto.SourceRunRequest(p) @@ -104,7 +104,7 @@ func (s *sourcePluginClient) Ack(_ context.Context, p record.Position) error { if err != nil { if err == io.EOF { // stream was gracefully closed - return plugin.ErrStreamNotOpen + return connector.ErrStreamNotOpen } return unwrapGRPCError(err) } diff --git a/pkg/plugin/standalone/v1/specifier.go b/pkg/plugin/connector/standalone/v1/specifier.go similarity index 79% rename from pkg/plugin/standalone/v1/specifier.go rename to pkg/plugin/connector/standalone/v1/specifier.go index e1be59f19..5abc749e5 100644 --- a/pkg/plugin/standalone/v1/specifier.go +++ b/pkg/plugin/connector/standalone/v1/specifier.go @@ -19,9 +19,9 @@ import ( connectorv1 "github.com/conduitio/conduit-connector-protocol/proto/connector/v1" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/plugin" - 
"github.com/conduitio/conduit/pkg/plugin/standalone/v1/internal/fromproto" - "github.com/conduitio/conduit/pkg/plugin/standalone/v1/internal/toproto" + "github.com/conduitio/conduit/pkg/plugin/connector" + "github.com/conduitio/conduit/pkg/plugin/connector/standalone/v1/internal/fromproto" + "github.com/conduitio/conduit/pkg/plugin/connector/standalone/v1/internal/toproto" goplugin "github.com/hashicorp/go-plugin" "google.golang.org/grpc" ) @@ -46,17 +46,17 @@ type specifierPluginClient struct { grpcClient connectorv1.SpecifierPluginClient } -var _ plugin.SpecifierPlugin = (*specifierPluginClient)(nil) +var _ connector.SpecifierPlugin = (*specifierPluginClient)(nil) -func (s *specifierPluginClient) Specify() (plugin.Specification, error) { +func (s *specifierPluginClient) Specify() (connector.Specification, error) { protoReq := toproto.SpecifierSpecifyRequest() protoResp, err := s.grpcClient.Specify(context.Background(), protoReq) if err != nil { - return plugin.Specification{}, unwrapGRPCError(err) + return connector.Specification{}, unwrapGRPCError(err) } specs, err := fromproto.SpecifierSpecifyResponse(protoResp) if err != nil { - return plugin.Specification{}, err + return connector.Specification{}, err } return specs, nil } diff --git a/pkg/plugin/errors.go b/pkg/plugin/errors.go index 98671018d..41db5645e 100644 --- a/pkg/plugin/errors.go +++ b/pkg/plugin/errors.go @@ -15,39 +15,11 @@ package plugin import ( - "fmt" - "github.com/conduitio/conduit/pkg/foundation/cerrors" ) var ( - ErrStreamNotOpen = cerrors.New("stream not open") ErrPluginNotFound = cerrors.New("plugin not found") ErrPluginNotRunning = cerrors.New("plugin is not running") ErrUnimplemented = cerrors.New("method not implemented") ) - -type ValidationError struct { - err error -} - -// Error formats the error message. -func (e *ValidationError) Error() string { - if e == nil { - return "" - } - return fmt.Sprintf("validation error: %v", e.err) -} - -// Unwrap returns the underlying error. 
-func (e *ValidationError) Unwrap() error { - if e == nil { - return nil - } - return e.err -} - -func (e *ValidationError) Is(target error) bool { - _, ok := target.(*ValidationError) - return ok -} diff --git a/pkg/plugin/name.go b/pkg/plugin/name.go new file mode 100644 index 000000000..f6a31fb71 --- /dev/null +++ b/pkg/plugin/name.go @@ -0,0 +1,93 @@ +// Copyright © 2023 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package plugin + +import ( + "strings" + + "github.com/Masterminds/semver/v3" +) + +const ( + PluginTypeBuiltin = "builtin" + PluginTypeStandalone = "standalone" + PluginTypeAny = "any" + + PluginVersionLatest = "latest" +) + +type FullName string + +func NewFullName(pluginType, pluginName, pluginVersion string) FullName { + if pluginType != "" { + pluginType += ":" + } + if pluginVersion != "" { + pluginVersion = "@" + pluginVersion + } + return FullName(pluginType + pluginName + pluginVersion) +} + +func (fn FullName) PluginType() string { + tokens := strings.SplitN(string(fn), ":", 2) + if len(tokens) > 1 { + return tokens[0] + } + return PluginTypeAny // default +} + +func (fn FullName) PluginName() string { + name := string(fn) + + tokens := strings.SplitN(name, ":", 2) + if len(tokens) > 1 { + name = tokens[1] + } + + tokens = strings.SplitN(name, "@", 2) + if len(tokens) > 1 { + name = tokens[0] + } + + return name +} + +func (fn FullName) PluginVersion() string { + tokens := strings.SplitN(string(fn), "@", 2) + if 
len(tokens) > 1 { + return tokens[len(tokens)-1] + } + return PluginVersionLatest // default +} + +func (fn FullName) PluginVersionGreaterThan(other FullName) bool { + leftVersion := fn.PluginVersion() + rightVersion := other.PluginVersion() + + leftSemver, err := semver.NewVersion(leftVersion) + if err != nil { + return false // left is an invalid semver, right is greater either way + } + rightSemver, err := semver.NewVersion(rightVersion) + if err != nil { + return true // left is a valid semver, right is not, left is greater + } + + return leftSemver.GreaterThan(rightSemver) +} + +func (fn FullName) String() string { + return fn.PluginType() + ":" + fn.PluginName() + "@" + fn.PluginVersion() +} diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/name_test.go similarity index 100% rename from pkg/plugin/plugin_test.go rename to pkg/plugin/name_test.go diff --git a/pkg/plugin/processor/builtin/impl/avro/config.go b/pkg/plugin/processor/builtin/impl/avro/config.go new file mode 100644 index 000000000..80251a774 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/config.go @@ -0,0 +1,180 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package avro + +import ( + "crypto/tls" + "crypto/x509" + "fmt" + "os" + + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/multierror" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro/schemaregistry" + "github.com/lovromazgon/franz-go/pkg/sr" +) + +type preRegisteredConfig struct { + // The subject of the schema in the schema registry used to encode the record. + Subject string `json:"subject"` + // The version of the schema in the schema registry used to encode the record. + Version int `json:"version" validate:"gt=0"` +} + +type schemaConfig struct { + // Strategy to use to determine the schema for the record. + // Available strategies are: + // * `preRegistered` (recommended) - Download an existing schema from the schema registry. + // This strategy is further configured with options starting with `schema.preRegistered.*`. + // * `autoRegister` (for development purposes) - Infer the schema from the record and register it + // in the schema registry. This strategy is further configured with options starting with + // `schema.autoRegister.*`. + // + // For more information about the behavior of each strategy read the main processor description. + StrategyType string `json:"strategy" validate:"required,inclusion=preRegistered|autoRegister"` + + PreRegistered preRegisteredConfig `json:"preRegistered"` + + // The subject name under which the inferred schema will be registered in the schema registry. 
+ AutoRegisteredSubject string `json:"autoRegister.subject"` + + strategy schemaregistry.SchemaStrategy +} + +func (c *schemaConfig) parse() error { + switch c.StrategyType { + case "preRegistered": + return c.parsePreRegistered() + case "autoRegister": + return c.parseAutoRegister() + default: + return cerrors.Errorf("unknown schema strategy %q", c.StrategyType) + } +} + +func (c *schemaConfig) parsePreRegistered() error { + if c.PreRegistered.Subject == "" { + return cerrors.New("subject required for schema strategy 'preRegistered'") + } + // TODO allow version to be set to "latest" + if c.PreRegistered.Version <= 0 { + return cerrors.Errorf("version needs to be positive: %v", c.PreRegistered.Version) + } + + c.strategy = schemaregistry.DownloadSchemaStrategy{ + Subject: c.PreRegistered.Subject, + Version: c.PreRegistered.Version, + } + return nil +} + +func (c *schemaConfig) parseAutoRegister() error { + if c.AutoRegisteredSubject == "" { + return cerrors.New("subject required for schema strategy 'autoRegister'") + } + + c.strategy = schemaregistry.ExtractAndUploadSchemaStrategy{ + Type: sr.TypeAvro, + Subject: c.AutoRegisteredSubject, + } + return nil +} + +type authConfig struct { + // The username to use with basic authentication. This option is required if + // auth.basic.password contains a value. If both auth.basic.username and auth.basic.password + // are empty basic authentication is disabled. + Username string `json:"basic.username"` + // The password to use with basic authentication. This option is required if + // auth.basic.username contains a value. If both auth.basic.username and auth.basic.password + // are empty basic authentication is disabled. 
+ Password string `json:"basic.password"` +} + +func (c *authConfig) validate() error { + switch { + case c.Username == "" && c.Password == "": + // no basic auth set + return nil + case c.Username == "": + return cerrors.Errorf("specify a username to enable basic auth or remove field password") + case c.Password == "": + return cerrors.Errorf("specify a password to enable basic auth or remove field username") + } + + return nil +} + +type clientCert struct { + // The path to a file containing a PEM encoded certificate. This option is required + // if tls.client.key contains a value. If both tls.client.cert and tls.client.key are empty + // TLS is disabled. + Cert string `json:"cert"` + // The path to a file containing a PEM encoded private key. This option is required + // if tls.client.cert contains a value. If both tls.client.cert and tls.client.key are empty + // TLS is disabled. + Key string `json:"key"` +} + +type tlsConfig struct { + // The path to a file containing PEM encoded CA certificates. If this option is empty, + // Conduit falls back to using the host's root CA set. 
+ CACert string `json:"ca.cert"` + + Client clientCert `json:"client"` + + tlsClientCert *tls.Certificate + tlsCACert *x509.CertPool +} + +func (c *tlsConfig) parse() error { + if c.Client.Cert == "" && c.Client.Key == "" && c.CACert == "" { + // no tls config set + return nil + } else if c.Client.Cert == "" || c.Client.Key == "" { + // we are missing some configuration fields + err := cerrors.New("invalid TLS config") + if c.Client.Cert == "" { + err = multierror.Append(err, cerrors.New("missing field: tls.client.cert")) + } + if c.Client.Key == "" { + err = multierror.Append(err, cerrors.New("missing field: tls.client.key")) + } + // CA cert is optional, we don't check if it's missing + return err + } + + clientCert, err := tls.LoadX509KeyPair(c.Client.Cert, c.Client.Key) + if err != nil { + return fmt.Errorf("failed to load client certificate: %w", err) + } + + c.tlsClientCert = &clientCert + + if c.CACert != "" { + // load custom CA cert + caCert, err := os.ReadFile(c.CACert) + if err != nil { + return fmt.Errorf("failed to load CA certificate: %w", err) + } + caCertPool := x509.NewCertPool() + if ok := caCertPool.AppendCertsFromPEM(caCert); !ok { + return cerrors.New("invalid CA cert") + } + c.tlsCACert = caCertPool + } + + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/avro/config_test.go b/pkg/plugin/processor/builtin/impl/avro/config_test.go new file mode 100644 index 000000000..969150207 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/config_test.go @@ -0,0 +1,215 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package avro + +import ( + "context" + "testing" + + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/matryer/is" +) + +func TestConfig_Parse(t *testing.T) { + testCases := []struct { + name string + input map[string]string + want encodeConfig + wantErr error + }{ + { + name: "preRegistered", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "preRegistered", + "schema.preRegistered.subject": "testsubject", + "schema.preRegistered.version": "123", + }, + want: encodeConfig{ + URL: "http://localhost", + Field: ".Payload.After", + Schema: schemaConfig{ + StrategyType: "preRegistered", + PreRegistered: preRegisteredConfig{ + Subject: "testsubject", + Version: 123, + }, + }, + }, + }, + { + name: "preRegistered without version", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "preRegistered", + "schema.preRegistered.subject": "testsubject", + }, + wantErr: cerrors.New("failed parsing schema strategy: version needs to be positive: 0"), + }, + { + name: "preRegistered without subject", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "preRegistered", + "schema.preRegistered.version": "123", + }, + wantErr: cerrors.New("failed parsing schema strategy: subject required for schema strategy 'preRegistered'"), + }, + { + name: "autoRegister", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": 
"testsubject", + }, + want: encodeConfig{ + URL: "http://localhost", + Field: ".Payload.After", + Schema: schemaConfig{ + StrategyType: "autoRegister", + AutoRegisteredSubject: "testsubject", + }, + }, + }, + { + name: "autoRegister without subject", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + }, + wantErr: cerrors.New("failed parsing schema strategy: subject required for schema strategy 'autoRegister'"), + }, + { + name: "non-default target field", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + "field": ".Payload.After.something", + }, + want: encodeConfig{ + Field: ".Payload.After.something", + URL: "http://localhost", + Schema: schemaConfig{ + StrategyType: "autoRegister", + AutoRegisteredSubject: "testsubject", + }, + }, + }, + { + name: "valid auth", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + "auth.basic.username": "user@example.com", + "auth.basic.password": "Passw0rd", + }, + want: encodeConfig{ + URL: "http://localhost", + Field: ".Payload.After", + Schema: schemaConfig{ + StrategyType: "autoRegister", + AutoRegisteredSubject: "testsubject", + }, + Auth: authConfig{ + Username: "user@example.com", + Password: "Passw0rd", + }, + }, + }, + { + name: "auth -- no username", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + "auth.basic.password": "Passw0rd", + }, + wantErr: cerrors.New("invalid basic auth: specify a username to enable basic auth or remove field password"), + }, + { + name: "auth -- no password", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + "auth.basic.username": "username@example.com", + }, + wantErr: 
cerrors.New("invalid basic auth: specify a password to enable basic auth or remove field username"), + }, + { + name: "tls: missing client cert and key", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + "tls.ca.cert": "/tmp/something", + }, + wantErr: cerrors.New(`failed parsing TLS: invalid TLS config +missing field: tls.client.cert +missing field: tls.client.key`), + }, + { + name: "valid tls", + input: map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + "tls.ca.cert": "testdata/cert.pem", + "tls.client.cert": "testdata/ca.pem", + "tls.client.key": "testdata/ca-key.pem", + }, + want: encodeConfig{ + Field: ".Payload.After", + URL: "http://localhost", + Schema: schemaConfig{ + StrategyType: "autoRegister", + AutoRegisteredSubject: "testsubject", + }, + TLS: tlsConfig{ + CACert: "testdata/cert.pem", + Client: clientCert{ + Cert: "testdata/ca.pem", + Key: "testdata/ca-key.pem", + }, + }, + }, + }, + } + + cmpOpts := cmpopts.IgnoreUnexported(encodeConfig{}, schemaConfig{}, tlsConfig{}) + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + got, gotErr := parseEncodeConfig(context.Background(), tc.input) + if tc.wantErr != nil { + is.True(gotErr != nil) // expected an error + is.Equal(tc.wantErr.Error(), gotErr.Error()) + + return + } + + is.NoErr(gotErr) + diff := cmp.Diff(tc.want, got, cmpOpts) + if diff != "" { + t.Errorf("mismatch (-want +got): %s", diff) + } + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/avro/decode.go b/pkg/plugin/processor/builtin/impl/avro/decode.go new file mode 100644 index 000000000..17d96ddfe --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/decode.go @@ -0,0 +1,199 @@ +// Copyright © 2024 Meroxa, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=decode_paramgen.go decodeConfig +//go:generate mockgen -source decode.go -destination=mock_decoder.go -package=avro -mock_names=decoder=MockDecoder . decoder + +package avro + +import ( + "context" + "crypto/tls" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro/schemaregistry" + "github.com/lovromazgon/franz-go/pkg/sr" +) + +type decoder interface { + Decode(ctx context.Context, b opencdc.RawData) (opencdc.StructuredData, error) +} + +type decodeConfig struct { + // The field that will be encoded. + Field string `json:"field" default:".Payload.After"` + + // URL of the schema registry (e.g. 
http://localhost:8085) + URL string `json:"url" validate:"required"` + + Auth authConfig `json:"auth"` + TLS tlsConfig `json:"tls"` + + fieldResolver sdk.ReferenceResolver +} + +func parseDecodeConfig(ctx context.Context, m map[string]string) (decodeConfig, error) { + cfg := decodeConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, cfg.Parameters()) + if err != nil { + return decodeConfig{}, err + } + + err = cfg.Auth.validate() + if err != nil { + return decodeConfig{}, cerrors.Errorf("invalid basic auth: %w", err) + } + + err = cfg.TLS.parse() + if err != nil { + return decodeConfig{}, cerrors.Errorf("failed parsing TLS: %w", err) + } + + // Parse target field + rr, err := sdk.NewReferenceResolver(cfg.Field) + if err != nil { + return decodeConfig{}, cerrors.Errorf("failed parsing target field: %w", err) + } + cfg.fieldResolver = rr + + return cfg, nil +} + +func (c decodeConfig) ClientOptions() []sr.Opt { + clientOpts := []sr.Opt{sr.URLs(c.URL), sr.Normalize()} + if c.Auth.Username != "" && c.Auth.Password != "" { + clientOpts = append(clientOpts, sr.BasicAuth(c.Auth.Username, c.Auth.Password)) + } + + if c.TLS.tlsClientCert != nil { + tlsCfg := &tls.Config{ + Certificates: []tls.Certificate{*c.TLS.tlsClientCert}, + MinVersion: tls.VersionTLS12, + } + if c.TLS.tlsCACert != nil { + tlsCfg.RootCAs = c.TLS.tlsCACert + } + clientOpts = append(clientOpts, sr.DialTLSConfig(tlsCfg)) + } + + return clientOpts +} + +type decodeProcessor struct { + sdk.UnimplementedProcessor + + logger log.CtxLogger + cfg decodeConfig + decoder decoder +} + +func NewDecodeProcessor(logger log.CtxLogger) sdk.Processor { + return &decodeProcessor{logger: logger} +} + +func (p *decodeProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "avro.decode", + Summary: "Decodes a field's raw data in the Avro format", + Description: `The processor takes raw data (bytes or a string) in the specified field and decodes +it from the [Avro 
format](https://avro.apache.org/) into structured data. It extracts the schema ID from the data, +downloads the associated schema from the [schema registry](https://docs.confluent.io/platform/current/schema-registry/index.html) +and decodes the payload. The schema is cached locally after it's first downloaded. + +If the processor encounters structured data or the data can't be decoded it returns an error. + +This processor is the counterpart to [` + "`avro.encode`" + `](/docs/processors/builtin/avro.encode).`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: decodeConfig{}.Parameters(), + }, nil +} + +func (p *decodeProcessor) Configure(ctx context.Context, m map[string]string) error { + cfg, err := parseDecodeConfig(ctx, m) + if err != nil { + return cerrors.Errorf("invalid config: %w", err) + } + + p.cfg = cfg + + return nil +} + +func (p *decodeProcessor) Open(ctx context.Context) error { + client, err := schemaregistry.NewClient(p.logger, p.cfg.ClientOptions()...) + if err != nil { + return cerrors.Errorf("could not create schema registry client: %w", err) + } + p.decoder = schemaregistry.NewDecoder(client, p.logger, &sr.Serde{}) + + return nil +} + +func (p *decodeProcessor) Process(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + proc, err := p.processRecord(ctx, rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + + out = append(out, proc) + } + + return out +} + +func (p *decodeProcessor) processRecord(ctx context.Context, rec opencdc.Record) (sdk.ProcessedRecord, error) { + field, err := p.cfg.fieldResolver.Resolve(&rec) + if err != nil { + return nil, cerrors.Errorf("failed resolving field: %w", err) + } + + data, err := p.rawData(field.Get()) + if err != nil { + return nil, cerrors.Errorf("failed getting raw data: %w", err) + } + + rd, err := p.decoder.Decode(ctx, data) + if err != nil { + return nil, 
cerrors.Errorf("failed encoding data: %w", err) + } + + err = field.Set(rd) + if err != nil { + return nil, cerrors.Errorf("failed setting the decoded value: %w", err) + } + return sdk.SingleRecord(rec), nil +} + +func (p *decodeProcessor) rawData(data any) (opencdc.RawData, error) { + switch v := data.(type) { + case opencdc.RawData: + return v, nil + case []byte: + return v, nil + case string: + return []byte(v), nil + default: + return nil, cerrors.Errorf("unexpected data type %T", v) + } +} + +func (p *decodeProcessor) Teardown(ctx context.Context) error { + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/avro/decode_examples_test.go b/pkg/plugin/processor/builtin/impl/avro/decode_examples_test.go new file mode 100644 index 000000000..74658a335 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/decode_examples_test.go @@ -0,0 +1,117 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build !integration + +package avro + +import ( + "context" + "fmt" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro/schemaregistry" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" + "github.com/lovromazgon/franz-go/pkg/sr" +) + +//nolint:govet // a more descriptive example description +func ExampleDecodeProcessor() { + url, cleanup := schemaregistry.ExampleSchemaRegistryURL("ExampleDecodeProcessor", 54322) + defer cleanup() + + client, err := schemaregistry.NewClient(log.Nop(), sr.URLs(url)) + if err != nil { + panic(fmt.Sprintf("failed to create schema registry client: %v", err)) + } + + _, err = client.CreateSchema(context.Background(), "example-decode", sr.Schema{ + Type: sr.TypeAvro, + Schema: ` +{ + "type":"record", + "name":"record", + "fields":[ + {"name":"myString","type":"string"}, + {"name":"myInt","type":"int"} + ] +}`, + }) + if err != nil { + panic(fmt.Sprintf("failed to create schema: %v", err)) + } + + p := NewDecodeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Decode a record field in Avro format", + Description: `This example shows the usage of the ` + "`avro.decode`" + ` processor. +The processor decodes the record's` + "`.Key`" + ` field using the schema that is +downloaded from the schema registry and needs to exist under the subject` + "`example-decode`" + `. 
+In this example we use the following schema: + +` + "```json" + ` +{ + "type":"record", + "name":"record", + "fields":[ + {"name":"myString","type":"string"}, + {"name":"myInt","type":"int"} + ] +} +` + "```", + Config: map[string]string{ + "url": url, + "field": ".Key", + }, + Have: opencdc.Record{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Key: opencdc.RawData([]byte{0, 0, 0, 0, 1, 6, 98, 97, 114, 2}), + }, + Want: sdk.SingleRecord{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Key: opencdc.StructuredData{ + "myString": "bar", + "myInt": 1, + }, + }}) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,12 +1,15 @@ + // { + // "position": "dGVzdC1wb3NpdGlvbg==", + // "operation": "create", + // "metadata": { + // "key1": "val1" + // }, + // - "key": "\u0000\u0000\u0000\u0000\u0001\u0006bar\u0002", + // + "key": { + // + "myInt": 1, + // + "myString": "bar" + // + }, + // "payload": { + // "before": null, + // "after": null + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/avro/decode_paramgen.go b/pkg/plugin/processor/builtin/impl/avro/decode_paramgen.go new file mode 100644 index 000000000..f8ca9ad3f --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/decode_paramgen.go @@ -0,0 +1,57 @@ +// Code generated by paramgen. DO NOT EDIT. +// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package avro + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (decodeConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "auth.basic.password": { + Default: "", + Description: "The password to use with basic authentication. This option is required if\nauth.basic.username contains a value. 
If both auth.basic.username and auth.basic.password\nare empty basic authentication is disabled.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "auth.basic.username": { + Default: "", + Description: "The username to use with basic authentication. This option is required if\nauth.basic.password contains a value. If both auth.basic.username and auth.basic.password\nare empty basic authentication is disabled.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "field": { + Default: ".Payload.After", + Description: "The field that will be encoded.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "tls.ca.cert": { + Default: "", + Description: "The path to a file containing PEM encoded CA certificates. If this option is empty,\nConduit falls back to using the host's root CA set.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "tls.client.cert": { + Default: "", + Description: "The path to a file containing a PEM encoded certificate. This option is required\nif tls.client.key contains a value. If both tls.client.cert and tls.client.key are empty\nTLS is disabled.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "tls.client.key": { + Default: "", + Description: "The path to a file containing a PEM encoded private key. This option is required\nif tls.client.cert contains a value. If both tls.client.cert and tls.client.key are empty\nTLS is disabled.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "url": { + Default: "", + Description: "URL of the schema registry (e.g. 
http://localhost:8085)", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/avro/decode_test.go b/pkg/plugin/processor/builtin/impl/avro/decode_test.go new file mode 100644 index 000000000..94a0a1acb --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/decode_test.go @@ -0,0 +1,89 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package avro + +import ( + "context" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/google/go-cmp/cmp" + "github.com/matryer/is" + "go.uber.org/mock/gomock" +) + +func TestDecodeProcessor_Process_RawData_CustomField(t *testing.T) { + data := `{"field_int": 123}` + testCases := []struct { + name string + field interface{} + }{ + { + name: "opencdc.RawData", + field: opencdc.RawData(data), + }, + { + name: "[]byte", + field: []byte(data), + }, + { + name: "string (base64 encoded byte slice", + field: data, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + ctx := context.Background() + + config := map[string]string{ + "url": "http://localhost", + "field": ".Payload.Before.something", + } + input := opencdc.Record{ + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "something": tc.field, + }, + After: opencdc.RawData("after data"), + }, + } + + decodedVal := opencdc.StructuredData{"decoded": "value"} + want := sdk.SingleRecord(input.Clone()) + want.Payload.Before.(opencdc.StructuredData)["something"] = decodedVal + + underTest := NewDecodeProcessor(log.Nop()) + err := underTest.Configure(ctx, config) + is.NoErr(err) + + // skipping Open(), so we can inject a mock encoder + mockDecoder := NewMockDecoder(gomock.NewController(t)) + mockDecoder.EXPECT(). + Decode(ctx, opencdc.RawData(data)). 
+ Return(decodedVal, nil) + underTest.(*decodeProcessor).decoder = mockDecoder + + got := underTest.Process(ctx, []opencdc.Record{input}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(want, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/avro/encode.go b/pkg/plugin/processor/builtin/impl/avro/encode.go new file mode 100644 index 000000000..8feff22f3 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/encode.go @@ -0,0 +1,233 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=encode_paramgen.go encodeConfig +//go:generate mockgen -source encode.go -destination=mock_encoder.go -package=avro -mock_names=encoder=MockEncoder . encoder + +package avro + +import ( + "context" + "crypto/tls" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro/schemaregistry" + "github.com/goccy/go-json" + "github.com/lovromazgon/franz-go/pkg/sr" +) + +type encoder interface { + Encode(ctx context.Context, sd opencdc.StructuredData) (opencdc.RawData, error) +} + +type encodeConfig struct { + // The field that will be encoded. + Field string `json:"field" default:".Payload.After"` + + // URL of the schema registry (e.g. 
http://localhost:8085) + URL string `json:"url" validate:"required"` + + Schema schemaConfig `json:"schema"` + Auth authConfig `json:"auth"` + TLS tlsConfig `json:"tls"` + + fieldResolver sdk.ReferenceResolver +} + +func (c encodeConfig) ClientOptions() []sr.Opt { + clientOpts := []sr.Opt{sr.URLs(c.URL), sr.Normalize()} + if c.Auth.Username != "" && c.Auth.Password != "" { + clientOpts = append(clientOpts, sr.BasicAuth(c.Auth.Username, c.Auth.Password)) + } + + if c.TLS.tlsClientCert != nil { + tlsCfg := &tls.Config{ + Certificates: []tls.Certificate{*c.TLS.tlsClientCert}, + MinVersion: tls.VersionTLS12, + } + if c.TLS.tlsCACert != nil { + tlsCfg.RootCAs = c.TLS.tlsCACert + } + clientOpts = append(clientOpts, sr.DialTLSConfig(tlsCfg)) + } + + return clientOpts +} + +func parseEncodeConfig(ctx context.Context, m map[string]string) (encodeConfig, error) { + cfg := encodeConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, cfg.Parameters()) + if err != nil { + return encodeConfig{}, err + } + + err = cfg.Auth.validate() + if err != nil { + return encodeConfig{}, cerrors.Errorf("invalid basic auth: %w", err) + } + + err = cfg.TLS.parse() + if err != nil { + return encodeConfig{}, cerrors.Errorf("failed parsing TLS: %w", err) + } + + err = cfg.Schema.parse() + if err != nil { + return encodeConfig{}, cerrors.Errorf("failed parsing schema strategy: %w", err) + } + + // Parse target field + rr, err := sdk.NewReferenceResolver(cfg.Field) + if err != nil { + return encodeConfig{}, cerrors.Errorf("failed parsing target field: %w", err) + } + cfg.fieldResolver = rr + + return cfg, nil +} + +type encodeProcessor struct { + sdk.UnimplementedProcessor + + logger log.CtxLogger + cfg encodeConfig + encoder encoder +} + +func NewEncodeProcessor(logger log.CtxLogger) sdk.Processor { + return &encodeProcessor{logger: logger} +} + +func (p *encodeProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "avro.encode", + Summary: "Encodes a record's field 
into the Avro format", + Description: `The processor takes a record's field and encodes it using a schema into the [Avro format](https://avro.apache.org/). +It provides two strategies for determining the schema: + +* **preRegistered** (recommended) + This strategy downloads an existing schema from the schema registry and uses it to encode the record. + This requires the schema to already be registered in the schema registry. The schema is downloaded + only once and cached locally. +* **autoRegister** (for development purposes) + This strategy infers the schema by inspecting the structured data and registers it in the schema + registry. If the record schema is known in advance it's recommended to use the preRegistered strategy + and manually register the schema, as this strategy comes with limitations. + + The strategy uses reflection to traverse the structured data of each record and determine the type + of each field. If a specific field is set to nil the processor won't have enough information to determine + the type and will default to a nullable string. Because of this it is not guaranteed that two records + with the same structure produce the same schema or even a backwards compatible schema. The processor + registers each inferred schema in the schema registry with the same subject, therefore the schema compatibility + checks need to be disabled for this schema to prevent failures. If the schema subject does not exist before running + this processor, it will automatically set the correct compatibility settings in the schema registry. 
+ +This processor is the counterpart to [` + "`avro.decode`" + `](/docs/processors/builtin/avro.decode).`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: encodeConfig{}.Parameters(), + }, nil +} + +func (p *encodeProcessor) Configure(ctx context.Context, m map[string]string) error { + cfg, err := parseEncodeConfig(ctx, m) + if err != nil { + return cerrors.Errorf("invalid config: %w", err) + } + + p.cfg = cfg + + return nil +} + +func (p *encodeProcessor) Open(context.Context) error { + client, err := schemaregistry.NewClient(p.logger, p.cfg.ClientOptions()...) + if err != nil { + return cerrors.Errorf("could not create schema registry client: %w", err) + } + p.encoder = schemaregistry.NewEncoder(client, p.logger, &sr.Serde{}, p.cfg.Schema.strategy) + + return nil +} + +func (p *encodeProcessor) Process(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + proc, err := p.processRecord(ctx, rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + + out = append(out, proc) + } + + return out +} + +func (p *encodeProcessor) processRecord(ctx context.Context, rec opencdc.Record) (sdk.ProcessedRecord, error) { + field, err := p.cfg.fieldResolver.Resolve(&rec) + if err != nil { + return nil, cerrors.Errorf("failed resolving field: %w", err) + } + + data, err := p.structuredData(field.Get()) + if err != nil { + return nil, cerrors.Errorf("failed getting structured data: %w", err) + } + + rd, err := p.encoder.Encode(ctx, data) + if err != nil { + return nil, cerrors.Errorf("failed encoding data: %w", err) + } + + err = field.Set(rd) + if err != nil { + return nil, cerrors.Errorf("failed setting encoded value into the record: %w", err) + } + return sdk.SingleRecord(rec), nil +} + +func (p *encodeProcessor) Teardown(context.Context) error { + return nil +} + +func (p *encodeProcessor) structuredData(data any) (opencdc.StructuredData, 
error) { + var sd opencdc.StructuredData + switch v := data.(type) { + case opencdc.RawData: + err := json.Unmarshal(v.Bytes(), &sd) + if err != nil { + return nil, cerrors.Errorf("failed unmarshalling JSON from raw data: %w", err) + } + case string: + err := json.Unmarshal([]byte(v), &sd) + if err != nil { + return nil, cerrors.Errorf("failed unmarshalling JSON from raw data: %w", err) + } + case []byte: + err := json.Unmarshal(v, &sd) + if err != nil { + return nil, cerrors.Errorf("failed unmarshalling JSON from raw data: %w", err) + } + case opencdc.StructuredData: + sd = v + default: + return nil, cerrors.Errorf("unexpected data type %T", v) + } + + return sd, nil +} diff --git a/pkg/plugin/processor/builtin/impl/avro/encode_examples_test.go b/pkg/plugin/processor/builtin/impl/avro/encode_examples_test.go new file mode 100644 index 000000000..1dabaaf8e --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/encode_examples_test.go @@ -0,0 +1,200 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build !integration + +package avro + +import ( + "context" + "fmt" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro/schemaregistry" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" + "github.com/lovromazgon/franz-go/pkg/sr" +) + +//nolint:govet // a more descriptive example description +func ExampleEncodeProcessor_autoRegister() { + url, cleanup := schemaregistry.ExampleSchemaRegistryURL("ExampleEncodeProcessor_autoRegister", 54322) + defer cleanup() + + p := NewEncodeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Auto-register schema", + Description: `This example shows the usage of the ` + "`avro.encode`" + ` processor +with the ` + "`autoRegister`" + ` schema strategy. The processor encodes the record's +` + "`.Payload.After`" + ` field using the schema that is extracted from the data +and registered on the fly under the subject ` + "`example-autoRegister`" + `.`, + Config: map[string]string{ + "url": url, + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "example-autoRegister", + }, + Have: opencdc.Record{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "myString": "bar", + "myInt": 1, + "myFloat": 2.3, + "myMap": map[string]any{ + "foo": true, + "bar": 2.2, + }, + "myStruct": opencdc.StructuredData{ + "foo": 1, + "bar": false, + }, + }, + }, + }, + Want: sdk.SingleRecord{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Payload: opencdc.Change{ + After: opencdc.RawData([]byte{0, 0, 0, 0, 1, 102, 102, 102, 102, 102, 102, 2, 64, 2, 154, 153, 153, 153, 153, 153, 1, 
64, 1, 6, 98, 97, 114, 0, 2}), + }, + }}) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,24 +1,12 @@ + // { + // "position": "dGVzdC1wb3NpdGlvbg==", + // "operation": "create", + // "metadata": { + // "key1": "val1" + // }, + // "key": null, + // "payload": { + // "before": null, + // - "after": { + // - "myFloat": 2.3, + // - "myInt": 1, + // - "myMap": { + // - "bar": 2.2, + // - "foo": true + // - }, + // - "myString": "bar", + // - "myStruct": { + // - "bar": false, + // - "foo": 1 + // - } + // - } + // + "after": "\u0000\u0000\u0000\u0000\u0001ffffff\u0002@\u0002\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\u0001@\u0001\u0006bar\u0000\u0002" + // } + // } +} + +//nolint:govet // a more descriptive example description +func ExampleEncodeProcessor_preRegistered() { + url, cleanup := schemaregistry.ExampleSchemaRegistryURL("ExampleEncodeProcessor_preRegistered", 54322) + defer cleanup() + + client, err := schemaregistry.NewClient(log.Nop(), sr.URLs(url)) + if err != nil { + panic(fmt.Sprintf("failed to create schema registry client: %v", err)) + } + + _, err = client.CreateSchema(context.Background(), "example-preRegistered", sr.Schema{ + Type: sr.TypeAvro, + Schema: ` +{ + "type":"record", + "name":"record", + "fields":[ + {"name":"myString","type":"string"}, + {"name":"myInt","type":"int"} + ] +}`, + }) + if err != nil { + panic(fmt.Sprintf("failed to create schema: %v", err)) + } + + p := NewEncodeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Pre-register schema", + Description: `This example shows the usage of the ` + "`avro.encode`" + ` processor +with the ` + "`preRegistered`" + ` schema strategy. When using this strategy, the +schema has to be manually pre-registered. 
In this example we use the following schema: + +` + "```json" + ` +{ + "type":"record", + "name":"record", + "fields":[ + {"name":"myString","type":"string"}, + {"name":"myInt","type":"int"} + ] +} +` + "```" + ` + +The processor encodes the record's` + "`.Key`" + ` field using the above schema.`, + Config: map[string]string{ + "url": url, + "schema.strategy": "preRegistered", + "schema.preRegistered.subject": "example-preRegistered", + "schema.preRegistered.version": "1", + "field": ".Key", + }, + Have: opencdc.Record{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Key: opencdc.StructuredData{ + "myString": "bar", + "myInt": 1, + }, + }, + Want: sdk.SingleRecord{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Key: opencdc.RawData([]byte{0, 0, 0, 0, 1, 6, 98, 97, 114, 2}), + }}) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,15 +1,12 @@ + // { + // "position": "dGVzdC1wb3NpdGlvbg==", + // "operation": "create", + // "metadata": { + // "key1": "val1" + // }, + // - "key": { + // - "myInt": 1, + // - "myString": "bar" + // - }, + // + "key": "\u0000\u0000\u0000\u0000\u0001\u0006bar\u0002", + // "payload": { + // "before": null, + // "after": null + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/avro/encode_paramgen.go b/pkg/plugin/processor/builtin/impl/avro/encode_paramgen.go new file mode 100644 index 000000000..a8e661952 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/encode_paramgen.go @@ -0,0 +1,86 @@ +// Code generated by paramgen. DO NOT EDIT. 
+// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package avro + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (encodeConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "auth.basic.password": { + Default: "", + Description: "The password to use with basic authentication. This option is required if\nauth.basic.username contains a value. If both auth.basic.username and auth.basic.password\nare empty basic authentication is disabled.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "auth.basic.username": { + Default: "", + Description: "The username to use with basic authentication. This option is required if\nauth.basic.password contains a value. If both auth.basic.username and auth.basic.password\nare empty basic authentication is disabled.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "field": { + Default: ".Payload.After", + Description: "The field that will be encoded.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "schema.autoRegister.subject": { + Default: "", + Description: "The subject name under which the inferred schema will be registered in the schema registry.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "schema.preRegistered.subject": { + Default: "", + Description: "The subject of the schema in the schema registry used to encode the record.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "schema.preRegistered.version": { + Default: "", + Description: "The version of the schema in the schema registry used to encode the record.", + Type: config.ParameterTypeInt, + Validations: []config.Validation{ + config.ValidationGreaterThan{V: 0}, + }, + }, + "schema.strategy": { + Default: "", + Description: "Strategy to use to determine the schema for the record.\nAvailable strategies are:\n* 
`preRegistered` (recommended) - Download an existing schema from the schema registry.\n This strategy is further configured with options starting with `schema.preRegistered.*`.\n* `autoRegister` (for development purposes) - Infer the schema from the record and register it\n in the schema registry. This strategy is further configured with options starting with\n `schema.autoRegister.*`.\n\nFor more information about the behavior of each strategy read the main processor description.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + config.ValidationInclusion{List: []string{"preRegistered", "autoRegister"}}, + }, + }, + "tls.ca.cert": { + Default: "", + Description: "The path to a file containing PEM encoded CA certificates. If this option is empty,\nConduit falls back to using the host's root CA set.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "tls.client.cert": { + Default: "", + Description: "The path to a file containing a PEM encoded certificate. This option is required\nif tls.client.key contains a value. If both tls.client.cert and tls.client.key are empty\nTLS is disabled.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "tls.client.key": { + Default: "", + Description: "The path to a file containing a PEM encoded private key. This option is required\nif tls.client.cert contains a value. If both tls.client.cert and tls.client.key are empty\nTLS is disabled.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "url": { + Default: "", + Description: "URL of the schema registry (e.g. 
http://localhost:8085)", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/avro/encode_test.go b/pkg/plugin/processor/builtin/impl/avro/encode_test.go new file mode 100644 index 000000000..ece621292 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/encode_test.go @@ -0,0 +1,166 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package avro + +import ( + "context" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/google/go-cmp/cmp" + "github.com/matryer/is" + "go.uber.org/mock/gomock" +) + +func TestEncodeProcessor_Process_StructuredData(t *testing.T) { + is := is.New(t) + ctx := context.Background() + + config := map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + } + input := opencdc.Record{ + Position: opencdc.Position("test position"), + Operation: opencdc.OperationCreate, + Metadata: opencdc.Metadata{}, + Key: opencdc.RawData("test key"), + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "field_int": 123, + }, + }, + } + want := sdk.SingleRecord(input.Clone()) + want.Payload.After = opencdc.RawData("encoded") + + underTest := NewEncodeProcessor(log.Nop()) + err := underTest.Configure(ctx, config) + is.NoErr(err) + + // skipping Open(), so we can inject a mock encoder + mockEncoder := NewMockEncoder(gomock.NewController(t)) + mockEncoder.EXPECT(). + Encode(ctx, input.Payload.After). 
+ Return(want.Payload.After, nil) + underTest.(*encodeProcessor).encoder = mockEncoder + + got := underTest.Process(ctx, []opencdc.Record{input}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(want, got[0], internal.CmpProcessedRecordOpts...)) +} + +func TestEncodeProcessor_Process_RawData(t *testing.T) { + is := is.New(t) + ctx := context.Background() + + config := map[string]string{ + "url": "http://localhost", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + } + input := opencdc.Record{ + Position: opencdc.Position("test position"), + Operation: opencdc.OperationCreate, + Metadata: opencdc.Metadata{}, + Key: opencdc.RawData("test key"), + Payload: opencdc.Change{ + After: opencdc.RawData(`{"field_int": 123}`), + }, + } + want := sdk.SingleRecord(input.Clone()) + want.Payload.After = opencdc.RawData("encoded") + + underTest := NewEncodeProcessor(log.Nop()) + err := underTest.Configure(ctx, config) + is.NoErr(err) + + // skipping Open(), so we can inject a mock encoder + mockEncoder := NewMockEncoder(gomock.NewController(t)) + mockEncoder.EXPECT(). + Encode(ctx, opencdc.StructuredData{"field_int": float64(123)}). 
+ Return(want.Payload.After, nil) + underTest.(*encodeProcessor).encoder = mockEncoder + + got := underTest.Process(ctx, []opencdc.Record{input}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(want, got[0], internal.CmpProcessedRecordOpts...)) +} + +func TestEncodeProcessor_Process_RawData_CustomField(t *testing.T) { + testCases := []struct { + name string + field interface{} + }{ + { + name: "opencdc.RawData", + field: opencdc.RawData(`{"field_int": 123}`), + }, + { + name: "string", + field: `{"field_int": 123}`, + }, + { + name: "[]byte", + field: []byte(`{"field_int": 123}`), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + ctx := context.Background() + + config := map[string]string{ + "url": "http://localhost", + "field": ".Payload.Before.something", + "schema.strategy": "autoRegister", + "schema.autoRegister.subject": "testsubject", + } + input := opencdc.Record{ + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "something": tc.field, + }, + After: opencdc.RawData("after data"), + }, + } + + encodedValue := opencdc.RawData("encoded") + want := sdk.SingleRecord(input.Clone()) + want.Payload.Before.(opencdc.StructuredData)["something"] = encodedValue + + underTest := NewEncodeProcessor(log.Nop()) + err := underTest.Configure(ctx, config) + is.NoErr(err) + + // skipping Open(), so we can inject a mock encoder + mockEncoder := NewMockEncoder(gomock.NewController(t)) + mockEncoder.EXPECT(). + Encode(ctx, opencdc.StructuredData{"field_int": float64(123)}). 
+ Return(encodedValue, nil) + underTest.(*encodeProcessor).encoder = mockEncoder + + got := underTest.Process(ctx, []opencdc.Record{input}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(want, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/avro/examples_exporter_test.go b/pkg/plugin/processor/builtin/impl/avro/examples_exporter_test.go new file mode 100644 index 000000000..666eda3ad --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/examples_exporter_test.go @@ -0,0 +1,34 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build export_processors + +package avro + +import ( + "os" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +func TestMain(m *testing.M) { + code := m.Run() + if code > 0 { + os.Exit(code) + } + + // tests passed, export the processors + exampleutil.ExportProcessors() +} diff --git a/pkg/plugin/processor/builtin/impl/avro/examples_test.go b/pkg/plugin/processor/builtin/impl/avro/examples_test.go new file mode 100644 index 000000000..22b23b1f4 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/examples_test.go @@ -0,0 +1,17 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go test -count=1 -tags export_processors . + +package avro diff --git a/pkg/plugin/processor/builtin/impl/avro/mock_decoder.go b/pkg/plugin/processor/builtin/impl/avro/mock_decoder.go new file mode 100644 index 000000000..fb21f11be --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/mock_decoder.go @@ -0,0 +1,56 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: decode.go +// +// Generated by this command: +// +// mockgen -source decode.go -destination=mock_decoder.go -package=avro -mock_names=decoder=MockDecoder . decoder +// + +// Package avro is a generated GoMock package. +package avro + +import ( + context "context" + reflect "reflect" + + opencdc "github.com/conduitio/conduit-commons/opencdc" + gomock "go.uber.org/mock/gomock" +) + +// MockDecoder is a mock of decoder interface. +type MockDecoder struct { + ctrl *gomock.Controller + recorder *MockDecoderMockRecorder +} + +// MockDecoderMockRecorder is the mock recorder for MockDecoder. +type MockDecoderMockRecorder struct { + mock *MockDecoder +} + +// NewMockDecoder creates a new mock instance. +func NewMockDecoder(ctrl *gomock.Controller) *MockDecoder { + mock := &MockDecoder{ctrl: ctrl} + mock.recorder = &MockDecoderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDecoder) EXPECT() *MockDecoderMockRecorder { + return m.recorder +} + +// Decode mocks base method. 
+func (m *MockDecoder) Decode(ctx context.Context, b opencdc.RawData) (opencdc.StructuredData, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Decode", ctx, b) + ret0, _ := ret[0].(opencdc.StructuredData) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Decode indicates an expected call of Decode. +func (mr *MockDecoderMockRecorder) Decode(ctx, b any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Decode", reflect.TypeOf((*MockDecoder)(nil).Decode), ctx, b) +} diff --git a/pkg/plugin/processor/builtin/impl/avro/mock_encoder.go b/pkg/plugin/processor/builtin/impl/avro/mock_encoder.go new file mode 100644 index 000000000..88f011d15 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/mock_encoder.go @@ -0,0 +1,56 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: encode.go +// +// Generated by this command: +// +// mockgen -source encode.go -destination=mock_encoder.go -package=avro -mock_names=encoder=MockEncoder . encoder +// + +// Package avro is a generated GoMock package. +package avro + +import ( + context "context" + reflect "reflect" + + opencdc "github.com/conduitio/conduit-commons/opencdc" + gomock "go.uber.org/mock/gomock" +) + +// MockEncoder is a mock of encoder interface. +type MockEncoder struct { + ctrl *gomock.Controller + recorder *MockEncoderMockRecorder +} + +// MockEncoderMockRecorder is the mock recorder for MockEncoder. +type MockEncoderMockRecorder struct { + mock *MockEncoder +} + +// NewMockEncoder creates a new mock instance. +func NewMockEncoder(ctrl *gomock.Controller) *MockEncoder { + mock := &MockEncoder{ctrl: ctrl} + mock.recorder = &MockEncoderMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockEncoder) EXPECT() *MockEncoderMockRecorder { + return m.recorder +} + +// Encode mocks base method. 
+func (m *MockEncoder) Encode(ctx context.Context, sd opencdc.StructuredData) (opencdc.RawData, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Encode", ctx, sd) + ret0, _ := ret[0].(opencdc.RawData) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Encode indicates an expected call of Encode. +func (mr *MockEncoderMockRecorder) Encode(ctx, sd any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Encode", reflect.TypeOf((*MockEncoder)(nil).Encode), ctx, sd) +} diff --git a/pkg/processor/schemaregistry/avro/extractor.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/extractor.go similarity index 97% rename from pkg/processor/schemaregistry/avro/extractor.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/extractor.go index 7006954bc..e331a5570 100644 --- a/pkg/processor/schemaregistry/avro/extractor.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/extractor.go @@ -19,13 +19,13 @@ import ( "reflect" "strings" + "github.com/conduitio/conduit-commons/opencdc" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/record" "github.com/hamba/avro/v2" ) var ( - structuredDataType = reflect.TypeOf(record.StructuredData{}) + structuredDataType = reflect.TypeOf(opencdc.StructuredData{}) byteType = reflect.TypeOf(byte(0)) ) @@ -51,7 +51,7 @@ type extractor struct{} // will traverse all values in the slice, extract their types and combine // them in a union type. If the slice is empty, the extracted value type // will default to a nullable string (union type of string and null). -// - If Extract encounters a value with the type of record.StructuredData it +// - If Extract encounters a value with the type of opencdc.StructuredData it // will treat it as a record and extract a record schema, where each key in // the structured data is extracted into its own record field. 
func (e extractor) Extract(v any) (avro.Schema, error) { @@ -195,7 +195,7 @@ func (e extractor) extractSlice(path []string, v reflect.Value, t reflect.Type) // type is an interface it falls back to looping through all values, extracting // their types and combining them into a nullable union schema. // If the key of the map is not a string, this function returns an error. If the -// type of the map is record.StructuredData it will treat it as a record and +// type of the map is opencdc.StructuredData it will treat it as a record and // extract a record schema, where each key in the structured data is extracted // into its own record field. func (e extractor) extractMap(path []string, v reflect.Value, t reflect.Type) (avro.Schema, error) { diff --git a/pkg/processor/schemaregistry/avro/schema.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/schema.go similarity index 100% rename from pkg/processor/schemaregistry/avro/schema.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/schema.go diff --git a/pkg/processor/schemaregistry/avro/schema_test.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/schema_test.go similarity index 96% rename from pkg/processor/schemaregistry/avro/schema_test.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/schema_test.go index c68acf741..05b121e3d 100644 --- a/pkg/processor/schemaregistry/avro/schema_test.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/schema_test.go @@ -18,8 +18,8 @@ import ( "fmt" "testing" + "github.com/conduitio/conduit-commons/opencdc" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/record" "github.com/hamba/avro/v2" "github.com/matryer/is" ) @@ -273,8 +273,8 @@ func TestSchema_MarshalUnmarshal(t *testing.T) { }))), }))), }, { - name: "record.StructuredData", - haveValue: record.StructuredData{ + name: "opencdc.StructuredData", + haveValue: opencdc.StructuredData{ "foo": "bar", "bar": 1, 
"baz": []int{1, 2, 3}, @@ -295,8 +295,8 @@ func TestSchema_MarshalUnmarshal(t *testing.T) { )), }} - newRecord := func(v any) record.StructuredData { - return record.StructuredData{"foo": v} + newRecord := func(v any) opencdc.StructuredData { + return opencdc.StructuredData{"foo": v} } for _, tc := range testCases { @@ -324,7 +324,7 @@ func TestSchema_MarshalUnmarshal(t *testing.T) { is.NoErr(err) // unmarshal the bytes back into structured data and compare the value - var gotValue record.StructuredData + var gotValue opencdc.StructuredData err = gotSchema.Unmarshal(bytes, &gotValue) is.NoErr(err) @@ -337,13 +337,13 @@ func TestSchema_MarshalUnmarshal(t *testing.T) { func TestSchemaForType_NestedStructuredData(t *testing.T) { is := is.New(t) - have := record.StructuredData{ + have := opencdc.StructuredData{ "foo": "bar", - "level1": record.StructuredData{ + "level1": opencdc.StructuredData{ "foo": "bar", - "level2": record.StructuredData{ + "level2": opencdc.StructuredData{ "foo": "bar", - "level3": record.StructuredData{ + "level3": opencdc.StructuredData{ "foo": "bar", "regularMap": map[string]bool{}, }, @@ -394,7 +394,7 @@ func TestSchemaForType_NestedStructuredData(t *testing.T) { is.NoErr(err) // only try to unmarshal to ensure there's no error, other tests assert that // umarshaled data matches the expectations - var unmarshaled record.StructuredData + var unmarshaled opencdc.StructuredData err = got.Unmarshal(bytes, &unmarshaled) is.NoErr(err) } diff --git a/pkg/processor/schemaregistry/avro/traverse.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/traverse.go similarity index 98% rename from pkg/processor/schemaregistry/avro/traverse.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/traverse.go index 9faf7ca11..7beb0c427 100644 --- a/pkg/processor/schemaregistry/avro/traverse.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/traverse.go @@ -19,8 +19,8 @@ import ( "reflect" "sort" + 
"github.com/conduitio/conduit-commons/opencdc" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/record" "github.com/hamba/avro/v2" ) @@ -109,11 +109,11 @@ func traverseValue(val any, p path, hasEncodedUnions bool, fn func(v any)) error switch val := val.(type) { case map[string]any: return traverse(val[l.field.Name()], index+1) - case record.StructuredData: + case opencdc.StructuredData: return traverse(val[l.field.Name()], index+1) case *map[string]any: return traverse(*val, index) // traverse value - case *record.StructuredData: + case *opencdc.StructuredData: return traverse(*val, index) // traverse value } return newUnexpectedTypeError(avro.Record, map[string]any{}, val) diff --git a/pkg/processor/schemaregistry/avro/union.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/union.go similarity index 100% rename from pkg/processor/schemaregistry/avro/union.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/union.go diff --git a/pkg/processor/schemaregistry/avro/union_test.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/union_test.go similarity index 95% rename from pkg/processor/schemaregistry/avro/union_test.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/union_test.go index 48ae3f0ef..eea56a1e6 100644 --- a/pkg/processor/schemaregistry/avro/union_test.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro/union_test.go @@ -18,7 +18,7 @@ import ( "reflect" "testing" - "github.com/conduitio/conduit/pkg/record" + "github.com/conduitio/conduit-commons/opencdc" "github.com/matryer/is" ) @@ -84,8 +84,8 @@ func TestUnionResolver(t *testing.T) { t.Run(tc.name, func(t *testing.T) { is := is.New(t) - newRecord := func() record.StructuredData { - sd := record.StructuredData{ + newRecord := func() opencdc.StructuredData { + sd := opencdc.StructuredData{ "foo1": tc.have, "map1": map[string]any{ "foo2": tc.have, @@ -100,7 +100,7 @@ func 
TestUnionResolver(t *testing.T) { } return sd } - want := record.StructuredData{ + want := opencdc.StructuredData{ "foo1": tc.have, // normal field shouldn't change "map1": map[string]any{ "foo2": tc.want, diff --git a/pkg/processor/schemaregistry/client.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/client.go similarity index 98% rename from pkg/processor/schemaregistry/client.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/client.go index 9bf9edd81..5e4036772 100644 --- a/pkg/processor/schemaregistry/client.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/client.go @@ -19,7 +19,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/processor/schemaregistry/internal" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal" "github.com/lovromazgon/franz-go/pkg/sr" ) diff --git a/pkg/processor/schemaregistry/client_test.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/client_test.go similarity index 98% rename from pkg/processor/schemaregistry/client_test.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/client_test.go index b1bc8a7f9..b5e407699 100644 --- a/pkg/processor/schemaregistry/client_test.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/client_test.go @@ -36,7 +36,7 @@ func TestClient_NotFound(t *testing.T) { c, err := NewClient( logger, sr.HTTPClient(&http.Client{Transport: rtr}), - sr.URLs(testSchemaRegistryURL(t)), + sr.URLs(TestSchemaRegistryURL(t)), ) is.NoErr(err) @@ -95,7 +95,7 @@ func TestClient_CacheMiss(t *testing.T) { // register schema in the schema registry but not in the client, to get a // cache miss but fetch from registry should return the schema - srClient, err := sr.NewClient(sr.URLs(testSchemaRegistryURL(t))) + srClient, err := sr.NewClient(sr.URLs(TestSchemaRegistryURL(t))) is.NoErr(err) want, err := 
srClient.CreateSchema(ctx, "test-cache-miss", sr.Schema{ Schema: `"string"`, @@ -109,7 +109,7 @@ func TestClient_CacheMiss(t *testing.T) { c, err := NewClient( logger, sr.HTTPClient(&http.Client{Transport: rtr}), - sr.URLs(testSchemaRegistryURL(t)), + sr.URLs(TestSchemaRegistryURL(t)), ) is.NoErr(err) @@ -180,7 +180,7 @@ func TestClient_CacheHit(t *testing.T) { c, err := NewClient( logger, sr.HTTPClient(&http.Client{Transport: rtr}), - sr.URLs(testSchemaRegistryURL(t)), + sr.URLs(TestSchemaRegistryURL(t)), ) is.NoErr(err) diff --git a/pkg/processor/schemaregistry/decoder.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/decoder.go similarity index 81% rename from pkg/processor/schemaregistry/decoder.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/decoder.go index 14dbd2165..0555d458c 100644 --- a/pkg/processor/schemaregistry/decoder.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/decoder.go @@ -17,9 +17,9 @@ package schemaregistry import ( "context" + "github.com/conduitio/conduit-commons/opencdc" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/record" "github.com/lovromazgon/franz-go/pkg/sr" ) @@ -37,16 +37,16 @@ func NewDecoder(client *Client, logger log.CtxLogger, serde *sr.Serde) *Decoder } } -func (d *Decoder) Decode(ctx context.Context, b record.RawData) (record.StructuredData, error) { - var out record.StructuredData - err := d.serde.Decode(b.Raw, &out) +func (d *Decoder) Decode(ctx context.Context, b opencdc.RawData) (opencdc.StructuredData, error) { + var out opencdc.StructuredData + err := d.serde.Decode(b.Bytes(), &out) if cerrors.Is(err, sr.ErrNotRegistered) { err = d.findAndRegisterSchema(ctx, b) if err != nil { return nil, err } // retry decoding - err = d.serde.Decode(b.Raw, &out) + err = d.serde.Decode(b.Bytes(), &out) } if err != nil { return nil, cerrors.Errorf("failed to decode raw data: %w", err) @@ -55,8 +55,8 
@@ func (d *Decoder) Decode(ctx context.Context, b record.RawData) (record.Structur return out, nil } -func (d *Decoder) findAndRegisterSchema(ctx context.Context, b record.RawData) error { - id, _, _ := d.serde.Header().DecodeID(b.Raw) // we know this won't throw an error since Decode didn't return ErrBadHeader +func (d *Decoder) findAndRegisterSchema(ctx context.Context, b opencdc.RawData) error { + id, _, _ := d.serde.Header().DecodeID(b.Bytes()) // we know this won't throw an error since Decode didn't return ErrBadHeader s, err := d.client.SchemaByID(ctx, id) if err != nil { return cerrors.Errorf("failed to get schema: %w", err) @@ -72,7 +72,7 @@ func (d *Decoder) findAndRegisterSchema(ctx context.Context, b record.RawData) e d.serde.Register( id, - record.StructuredData{}, + opencdc.StructuredData{}, sr.EncodeFn(encodeFn(schema, sr.SubjectSchema{ID: id})), sr.DecodeFn(decodeFn(schema, sr.SubjectSchema{ID: id})), ) diff --git a/pkg/processor/schemaregistry/encoder.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/encoder.go similarity index 84% rename from pkg/processor/schemaregistry/encoder.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/encoder.go index 9361699e5..6eef353db 100644 --- a/pkg/processor/schemaregistry/encoder.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/encoder.go @@ -17,9 +17,9 @@ package schemaregistry import ( "context" + "github.com/conduitio/conduit-commons/opencdc" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/record" "github.com/lovromazgon/franz-go/pkg/sr" ) @@ -32,7 +32,7 @@ type Encoder struct { } type SchemaStrategy interface { - GetSchema(context.Context, *Client, log.CtxLogger, record.StructuredData) (Schema, sr.SubjectSchema, error) + GetSchema(context.Context, *Client, log.CtxLogger, opencdc.StructuredData) (Schema, sr.SubjectSchema, error) } func NewEncoder(client *Client, logger 
log.CtxLogger, serde *sr.Serde, strategy SchemaStrategy) *Encoder { @@ -44,10 +44,10 @@ func NewEncoder(client *Client, logger log.CtxLogger, serde *sr.Serde, strategy } } -func (e *Encoder) Encode(ctx context.Context, sd record.StructuredData) (record.RawData, error) { +func (e *Encoder) Encode(ctx context.Context, sd opencdc.StructuredData) (opencdc.RawData, error) { s, ss, err := e.GetSchema(ctx, e.client, e.logger, sd) if err != nil { - return record.RawData{}, cerrors.Errorf("failed to get schema: %w", err) + return opencdc.RawData{}, cerrors.Errorf("failed to get schema: %w", err) } b, err := e.serde.Encode(sd, sr.ID(ss.ID)) @@ -55,7 +55,7 @@ func (e *Encoder) Encode(ctx context.Context, sd record.StructuredData) (record. // TODO note that we need to register specific indexes when adding support for protobuf e.serde.Register( ss.ID, - record.StructuredData{}, + opencdc.StructuredData{}, sr.EncodeFn(encodeFn(s, ss)), sr.DecodeFn(decodeFn(s, ss)), ) @@ -64,9 +64,9 @@ func (e *Encoder) Encode(ctx context.Context, sd record.StructuredData) (record. 
b, err = e.serde.Encode(sd, sr.ID(ss.ID)) } if err != nil { - return record.RawData{}, cerrors.Errorf("failed to encode data: %w", err) + return opencdc.RawData{}, cerrors.Errorf("failed to encode data: %w", err) } - return record.RawData{Raw: b}, nil + return opencdc.RawData(b), nil } type ExtractAndUploadSchemaStrategy struct { @@ -74,7 +74,7 @@ type ExtractAndUploadSchemaStrategy struct { Subject string } -func (str ExtractAndUploadSchemaStrategy) GetSchema(ctx context.Context, client *Client, _ log.CtxLogger, sd record.StructuredData) (Schema, sr.SubjectSchema, error) { +func (str ExtractAndUploadSchemaStrategy) GetSchema(ctx context.Context, client *Client, _ log.CtxLogger, sd opencdc.StructuredData) (Schema, sr.SubjectSchema, error) { sf, ok := DefaultSchemaFactories[str.Type] if !ok { return nil, sr.SubjectSchema{}, cerrors.Errorf("unknown schema type %q (%d)", str.Type.String(), str.Type) @@ -103,7 +103,7 @@ type DownloadSchemaStrategy struct { Version int } -func (str DownloadSchemaStrategy) GetSchema(ctx context.Context, client *Client, _ log.CtxLogger, _ record.StructuredData) (Schema, sr.SubjectSchema, error) { +func (str DownloadSchemaStrategy) GetSchema(ctx context.Context, client *Client, _ log.CtxLogger, _ opencdc.StructuredData) (Schema, sr.SubjectSchema, error) { // fetch schema from registry ss, err := client.SchemaBySubjectVersion(ctx, str.Subject, str.Version) if err != nil { diff --git a/pkg/processor/schemaregistry/encoder_test.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/encoder_test.go similarity index 88% rename from pkg/processor/schemaregistry/encoder_test.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/encoder_test.go index 9200a0d19..a0cd4dd0a 100644 --- a/pkg/processor/schemaregistry/encoder_test.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/encoder_test.go @@ -18,8 +18,8 @@ import ( "context" "testing" + "github.com/conduitio/conduit-commons/opencdc" 
"github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/record" "github.com/lovromazgon/franz-go/pkg/sr" "github.com/matryer/is" ) @@ -30,10 +30,10 @@ func TestEncodeDecode_ExtractAndUploadSchemaStrategy(t *testing.T) { logger := log.Nop() var serde sr.Serde - client, err := NewClient(logger, sr.URLs(testSchemaRegistryURL(t))) + client, err := NewClient(logger, sr.URLs(TestSchemaRegistryURL(t))) is.NoErr(err) - have := record.StructuredData{ + have := opencdc.StructuredData{ "myString": "bar", "myInt": 1, "myFloat": 2.3, @@ -41,13 +41,13 @@ func TestEncodeDecode_ExtractAndUploadSchemaStrategy(t *testing.T) { "foo": true, "bar": 2.2, }, - "myStruct": record.StructuredData{ + "myStruct": opencdc.StructuredData{ "foo": 1, "bar": false, }, "mySlice": []int{1, 2, 3}, } - want := record.StructuredData{ + want := opencdc.StructuredData{ "myString": "bar", "myInt": 1, "myFloat": 2.3, @@ -88,14 +88,14 @@ func TestEncodeDecode_DownloadStrategy_Avro(t *testing.T) { logger := log.Nop() var serde sr.Serde - client, err := NewClient(logger, sr.URLs(testSchemaRegistryURL(t))) + client, err := NewClient(logger, sr.URLs(TestSchemaRegistryURL(t))) is.NoErr(err) - have := record.StructuredData{ + have := opencdc.StructuredData{ "myString": "bar", "myInt": 1, } - want := record.StructuredData{ + want := opencdc.StructuredData{ "myString": "bar", "myInt": 1, } diff --git a/pkg/processor/schemaregistry/client_fake_test.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/fake.go similarity index 84% rename from pkg/processor/schemaregistry/client_fake_test.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/fake.go index 594b32283..149e5c6cc 100644 --- a/pkg/processor/schemaregistry/client_fake_test.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/fake.go @@ -17,6 +17,8 @@ package schemaregistry import ( + "fmt" + "net" "net/http" "net/http/httptest" "strconv" @@ -25,7 +27,7 @@ import ( "testing" 
"github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor/schemaregistry/internal" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal" "github.com/goccy/go-json" "github.com/lovromazgon/franz-go/pkg/sr" ) @@ -35,24 +37,56 @@ var ( fakeServerByTestLock sync.Mutex ) -// testSchemaRegistryURL creates a fake in-memory schema registry server and -// returns its address. This method is only used if the tests are run without +// ExampleSchemaRegistryURL creates a fake in-memory schema registry server and +// returns its address and a cleanup function which should be executed in a +// deferred call. +// +// This method is only used if examples are run without --tags=integration. It +// is meant as a utility to allow faster iteration when developing, please run +// integration tests to ensure the code works with a real schema registry. +func ExampleSchemaRegistryURL(exampleName string, port int) (string, func()) { + // discard all schema registry logs in examples + logf := func(_ string, _ ...any) {} + return fakeSchemaRegistryURL(exampleName, logf, port) +} + +// TestSchemaRegistryURL creates a fake in-memory schema registry server and +// returns its address. +// +// This method is only used if the tests are run without // --tags=integration. It is meant as a utility to allow faster iteration when // developing, please run integration tests to ensure the code works with a real // schema registry. 
-func testSchemaRegistryURL(t *testing.T) string { +func TestSchemaRegistryURL(t testing.TB) string { + url, cleanup := fakeSchemaRegistryURL(t.Name(), t.Logf, 0) + t.Cleanup(cleanup) + return url +} + +func fakeSchemaRegistryURL(name string, logf func(format string, args ...any), port int) (string, func()) { fakeServerByTestLock.Lock() defer fakeServerByTestLock.Unlock() - srv := fakeServerByTest[t.Name()] + srv := fakeServerByTest[name] + cleanup := func() {} if srv == nil { - srv = httptest.NewServer(newFakeServer(t.Logf)) - fakeServerByTest[t.Name()] = srv - t.Cleanup(func() { - srv.Close() - }) + srv = httptest.NewUnstartedServer(newFakeServer(logf)) + if port > 0 { + // NewUnstartedServer creates a listener. Close that listener and replace + // with a custom one. + _ = srv.Listener.Close() + l, err := net.Listen("tcp", fmt.Sprintf("127.0.0.1:%d", port)) + if err != nil { + panic(fmt.Sprintf("failed starting test server on port %d: %v", port, err)) + } + srv.Listener = l + } + + srv.Start() + fakeServerByTest[name] = srv + cleanup = srv.Close } - return srv.URL + return srv.URL, cleanup } const ( diff --git a/pkg/processor/schemaregistry/client_integration_test.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/integration.go similarity index 66% rename from pkg/processor/schemaregistry/client_integration_test.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/integration.go index 7627a4fd8..774f4828e 100644 --- a/pkg/processor/schemaregistry/client_integration_test.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/integration.go @@ -18,10 +18,17 @@ package schemaregistry import "testing" -// testSchemaRegistryURL points to the schema registry defined in +// ExampleSchemaRegistryURL points to the schema registry defined in // /test/docker-compose-schemaregistry.yml. // This method is only used if the tests are run with --tags=integration. 
-func testSchemaRegistryURL(t *testing.T) string { +func ExampleSchemaRegistryURL(exampleName string, port int) (string, func()) { + return "localhost:8085", func() {} +} + +// TestSchemaRegistryURL points to the schema registry defined in +// /test/docker-compose-schemaregistry.yml. +// This method is only used if the tests are run with --tags=integration. +func TestSchemaRegistryURL(t testing.TB) string { t.Log("Using real schema registry server") return "localhost:8085" } diff --git a/pkg/processor/schemaregistry/internal/cache.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal/cache.go similarity index 100% rename from pkg/processor/schemaregistry/internal/cache.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal/cache.go diff --git a/pkg/processor/schemaregistry/internal/cache_test.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal/cache_test.go similarity index 100% rename from pkg/processor/schemaregistry/internal/cache_test.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal/cache_test.go diff --git a/pkg/processor/schemaregistry/internal/rabin.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal/rabin.go similarity index 100% rename from pkg/processor/schemaregistry/internal/rabin.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal/rabin.go diff --git a/pkg/processor/schemaregistry/internal/rabin_test.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal/rabin_test.go similarity index 100% rename from pkg/processor/schemaregistry/internal/rabin_test.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/internal/rabin_test.go diff --git a/pkg/processor/schemaregistry/schema.go b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/schema.go similarity index 94% rename from pkg/processor/schemaregistry/schema.go rename to pkg/plugin/processor/builtin/impl/avro/schemaregistry/schema.go index 69b276d86..cd00d1df2 
100644 --- a/pkg/processor/schemaregistry/schema.go +++ b/pkg/plugin/processor/builtin/impl/avro/schemaregistry/schema.go @@ -15,7 +15,7 @@ package schemaregistry import ( - "github.com/conduitio/conduit/pkg/processor/schemaregistry/avro" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro/schemaregistry/avro" "github.com/lovromazgon/franz-go/pkg/sr" ) diff --git a/pkg/plugin/processor/builtin/impl/avro/testdata/ca-key.pem b/pkg/plugin/processor/builtin/impl/avro/testdata/ca-key.pem new file mode 100644 index 000000000..cd5e0e6ce --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/testdata/ca-key.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEAwgjVL+kEWMm2jicnhYH3ey9u5ZewzQhwOOYT/1L9a9cGdSOJ +69opCooJp/i4Ljp3lxe4W9aF7l+sPH+lIbdNCEDhIDT6w2mrMDP0dHdBfNlMPhzI +rksZ35FDFztDzHt0I80Ys3m3UdWqYuMfcr0cfbGcCvkszlRahgQnQtsZuwtcJ7aB +YLgQ+0MPWhy94bUwykl2tJ7uPwdI9DPgVTkHc+ujGsMqonDj2ItxZzgik7FhcTWo +cDPXWkccJOoZN+YRSR5UVnFzPIZ5B8hohEwtAQiTDMaQCiCLKE0UUfcslb8i9Dv9 +iWzFBQHDX989pzdRej/sbDY9D5gJCXxAtZBlrwIDAQABAoIBABR3nA1Gtherh18t +UCTsFa/fzAHZ42NtTXEjC9714Rd8AglfK1A+ne+nqecW/E6cCjtc3CyLz2ramq9z +A3cv/6csONMN45mzTsRx5opWfgjuXvV6UczYzSzhBMOxk7BBIXoZS067OxQdxrBR +puKSgivcDUq3B/vnt/jE/WM2+1rYY3lo79nnGrc71YiFba9gAOPduOdGq1XAHTB7 +ZVRhHqXHics5IwwIWiT5MHPU/rjkJYyEAtEClBPncIpiteytaIHn6n46vY8Mq+6o +Io4me5OOZccJjImU94Ev22am7Xf809fzwS6gIlUArsGuOEue+dqoIYH0awl1u9Cx +6B0TaSkCgYEA/uyRph6UFqYr3k3bmR8qHITMIi9oaT8lYJRmlz15mrcjFn4Ss/Qx +TbJ8zByzmrdD2kW2JTgvLzkFrMkcD3MY6guWYCLfRNY9bBHiYUbLs1QUYKqbXHgi +5VE/NVl5JytRXtNVLHDerjAnQKjAMmoq63n8n7EUnThpWRa8nPVzSJMCgYEAwtp5 +3x2Xh+RYhiuoo7Y5J8FDiEDxP2bG4HbjU4tFG63HV9BQZLbJtUe+YdsJKFxNlySg +Ymhw5aifz/GWs4ttTva1+DILSF3VZsANT6BpfR48GZfaRBThDl/yaKK2+K4THmt/ +kUa4ycztoLwMHyFgLvxNEevnohElO+mAP54P6/UCgYEAulL03eNJwAgy1ig4EoIJ +yJBzl6To/swNqLqvBWoEYMXsmnsJdohMJQsJVjLAP5wltvru9+NJ8Y0PDcxsewpY +MnDHt3qlj+27NSsZOJqDNo6E2ma4aWzRrKSmu3qrJ6rsrY9vmKzixZQijIsK945L 
+0pL1x06pz4Q0083fjCWG3osCgYEAwHUAaOKWl89vrF4U/FZwz5ZxOHRElIXUK0Li +N9NYWAVl3pNPxJpSUq5hGdA5lFbPAFlZ3LkhwYSzDLWnTwGI3rSJeXMcd8bOj+Vj +gGGFr+IcJH87Q4GH5SXZjYYhyTBOi/5Litdaj1/M2bTDGEBj+ySSjv/90rKynyF7 +3domCb0CgYBty0fR1xfJ0QrtyvvKiT8lUZFZk/15oA72iDrmzDh5JgwOP8ueYL7T +rcA9GN6LPtbcdrq9GHzWRo2CfaVBfX70urxrxAP51USYyta6lEcNrwpLGEe7ig/D +hOZacaaxGIbbCGR+U3zmSJu28apaPFlKX/nLHB8pzNv87KTFJT92pQ== +-----END RSA PRIVATE KEY----- diff --git a/pkg/plugin/processor/builtin/impl/avro/testdata/ca.pem b/pkg/plugin/processor/builtin/impl/avro/testdata/ca.pem new file mode 100644 index 000000000..3b474439c --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/testdata/ca.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDazCCAlOgAwIBAgIUWCxb55nS31zoq5B0P/rurLKe+5gwDQYJKoZIhvcNAQEL +BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNDAzMDIxNDMwNTRaFw0zNDAx +MDkxNDMwNTRaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw +HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDCCNUv6QRYybaOJyeFgfd7L27ll7DNCHA45hP/Uv1r +1wZ1I4nr2ikKigmn+LguOneXF7hb1oXuX6w8f6Uht00IQOEgNPrDaaswM/R0d0F8 +2Uw+HMiuSxnfkUMXO0PMe3QjzRizebdR1api4x9yvRx9sZwK+SzOVFqGBCdC2xm7 +C1wntoFguBD7Qw9aHL3htTDKSXa0nu4/B0j0M+BVOQdz66MawyqicOPYi3FnOCKT +sWFxNahwM9daRxwk6hk35hFJHlRWcXM8hnkHyGiETC0BCJMMxpAKIIsoTRRR9yyV +vyL0O/2JbMUFAcNf3z2nN1F6P+xsNj0PmAkJfEC1kGWvAgMBAAGjUzBRMB0GA1Ud +DgQWBBT4upcDy6UnNcEdHzmPtTuSVfR7YTAfBgNVHSMEGDAWgBT4upcDy6UnNcEd +HzmPtTuSVfR7YTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQAN +gbu+AKS7uuOsS6TNgZAzT+Gd6uUxFh0fRtiZTUDtzZ/PsDSwwt7llx7msLv0/U9Q +iP7quphFRNzmaAmhiBToOspMm0IJsOxudGy5ZZ/Ik9HwetYT9SYYKsOkt62UJDcj +CjGWjZooINkFBQpBp5Ep+gUG9u9bhAbPmH/uz6I5jc8hif8F/HQRpUUYH6nnHqcX +FG9p2ONLaRKtl2wMLYohokyn9LMncOQA+AfgPJvCZunMBUjFCOLeKVpDj0BGRypd +xKtEtzzofNxHZlulRDmHotLCP/uhC+oP5ERbQ84QX8OU7tddARKlbwCyD77BVIxy +ms8GxNr9s5sqQcmiEv9b +-----END CERTIFICATE----- diff --git 
a/pkg/plugin/processor/builtin/impl/avro/testdata/cert.pem b/pkg/plugin/processor/builtin/impl/avro/testdata/cert.pem new file mode 100644 index 000000000..4a2a9d6ec --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/avro/testdata/cert.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDazCCAlOgAwIBAgIUaYEGU8BNEpdCWCDe4bZTN0w/HSswDQYJKoZIhvcNAQEL +BQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDAeFw0yNDAzMDIxNDM0NTZaFw0zNDAy +MjgxNDM0NTZaMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEw +HwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQCo4KHMW8tHL/y611vI2DKNl+4h12s6Y1H4S4So6cS2 +I8ep6cX74YK6w8mhRn2nhWxrNCxtYt9Qv6C0gh38HTMKI5NU5hCy+p1WclLjaYFS +C7ZRP2ZoU330qsnoRO2/3a6MNNMZf3c8FsCNRRsJYKZuEoH77SY/HLYMxdtHDZYz +ct/tCvEBkI8QGaHmEYQNYLez2Pdv9zgvzWcFBArwk+MhHpo1T30vQd6iw27dY7w0 +1WWX/h9wWlheq/x8DeAT2hzosOIBsJEAmm9UebBIeI+d3lt0V7w47odV5hhwu3Zj +V24ITv9AXfV5EEX7nTWN8Ld5CqN6EhcS1N2ZmmE2M7ejAgMBAAGjUzBRMB0GA1Ud +DgQWBBR+cc88tYulY/UqUce/BOVPbd9R3TAfBgNVHSMEGDAWgBR+cc88tYulY/Uq +Uce/BOVPbd9R3TAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQCd +Aqz3+wmWIWTrhdMgZ20LdwG9rOKxGgxRBGYm+cEsQLdUAWmmnMVdwEL6LtD6lLG7 +WtVwfCARp9hn1m+ZxYUgOLbnx4mM8Ye6p+Hnj0ff05IrT99uB9F/FiCLrr7XvYlb +UE78VFFfBgUNbwyVos8UnMMQ2Y5aZa1cmJmVUYawa2+Y62LQP+c+T5ZTPyQZmQR6 +nd+cSPplJDamLUYxMhB1FLUWvBC8E6kZOFVjV4NsS62FZKV2ezbbGQAo/4oskFwK +CSF7tGvb/OCJ/AtJF0ReQNqn2wUuCGK5cULh8l86U34o/t75VU9nf1vZdrLjSThI +7oKhzKoQ8GuqXVj7hVx1 +-----END CERTIFICATE----- diff --git a/pkg/plugin/processor/builtin/impl/base64/decode.go b/pkg/plugin/processor/builtin/impl/base64/decode.go new file mode 100644 index 000000000..7ad2a192f --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/decode.go @@ -0,0 +1,121 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=decode_paramgen.go decodeConfig + +package base64 + +import ( + "context" + "encoding/base64" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" +) + +type decodeProcessor struct { + sdk.UnimplementedProcessor + + config decodeConfig + referenceResolver sdk.ReferenceResolver +} + +type decodeConfig struct { + // Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`). + // Note that it is not allowed to base64 decode the `.Position` field. + Field string `json:"field" validate:"required,exclusion=.Position"` +} + +func NewDecodeProcessor(log.CtxLogger) sdk.Processor { + return &decodeProcessor{} +} + +func (p *decodeProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "base64.decode", + Summary: "Decode a field to base64.", + Description: `The processor will decode the value of the target field from base64 and store the +result in the target field. 
It is not allowed to decode the ` + "`.Position`" + ` field.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: decodeConfig{}.Parameters(), + }, nil +} + +func (p *decodeProcessor) Configure(ctx context.Context, m map[string]string) error { + err := sdk.ParseConfig(ctx, m, &p.config, p.config.Parameters()) + if err != nil { + return cerrors.Errorf("failed to parse configuration: %w", err) + } + + resolver, err := sdk.NewReferenceResolver(p.config.Field) + if err != nil { + return cerrors.Errorf(`failed to parse the "field" parameter: %w`, err) + } + p.referenceResolver = resolver + + return nil +} + +func (p *decodeProcessor) Open(context.Context) error { return nil } + +func (p *decodeProcessor) Process(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + rec, err := p.base64Decode(rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + out = append(out, rec) + } + return out +} + +func (p *decodeProcessor) base64Decode(rec opencdc.Record) (sdk.ProcessedRecord, error) { + ref, err := p.referenceResolver.Resolve(&rec) + if err != nil { + return nil, cerrors.Errorf("failed to resolve the field: %w", err) + } + + var raw []byte + switch val := ref.Get().(type) { + case []byte: + raw = val + case opencdc.RawData: + raw = val + case string: + raw = []byte(val) + case nil: + return sdk.SingleRecord(rec), nil + default: + return nil, cerrors.Errorf("unexpected data type %T", val) + } + + decoded := make([]byte, base64.StdEncoding.DecodedLen(len(raw))) + n, err := base64.StdEncoding.Decode(decoded, raw) + if err != nil { + return nil, cerrors.Errorf("failed to decode the value: %w", err) + } + + err = ref.Set(string(decoded[:n])) + if err != nil { + return nil, cerrors.Errorf("failed to set the decoded value into the record: %w", err) + } + + return sdk.SingleRecord(rec), nil +} + +func (p *decodeProcessor) Teardown(context.Context) 
error { return nil } diff --git a/pkg/plugin/processor/builtin/impl/base64/decode_examples_test.go b/pkg/plugin/processor/builtin/impl/base64/decode_examples_test.go new file mode 100644 index 000000000..a57099885 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/decode_examples_test.go @@ -0,0 +1,80 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package base64 + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // a more descriptive example description +func ExampleDecodeProcessor() { + p := NewDecodeProcessor(log.Nop()) + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Decode a base64 encoded string", + Description: `This example decodes the base64 encoded string stored in +` + "`.Payload.After`" + `. Note that the result is a string, so if you want to +further process the result (e.g. parse the string as JSON), you need to chain +other processors (e.g. 
[` + "`json.decode`" + `](/docs/processors/builtin/json.decode)).`, + Config: map[string]string{ + "field": ".Payload.After.foo", + }, + Have: opencdc.Record{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Key: opencdc.RawData("test-key"), + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": "YmFy", + }, + }, + }, + Want: sdk.SingleRecord{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Key: opencdc.RawData("test-key"), + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": "bar", + }, + }, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,14 +1,14 @@ + // { + // "position": "dGVzdC1wb3NpdGlvbg==", + // "operation": "create", + // "metadata": { + // "key1": "val1" + // }, + // "key": "test-key", + // "payload": { + // "before": null, + // "after": { + // - "foo": "YmFy" + // + "foo": "bar" + // } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/base64/decode_paramgen.go b/pkg/plugin/processor/builtin/impl/base64/decode_paramgen.go new file mode 100644 index 000000000..08d534960 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/decode_paramgen.go @@ -0,0 +1,22 @@ +// Code generated by paramgen. DO NOT EDIT. +// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package base64 + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (decodeConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: "", + Description: "Field is the target field, as it would be addressed in a Go template (e.g. 
`.Payload.After.foo`).\nNote that it is not allowed to base64 decode the `.Position` field.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + config.ValidationExclusion{List: []string{".Position"}}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/base64/decode_test.go b/pkg/plugin/processor/builtin/impl/base64/decode_test.go new file mode 100644 index 000000000..f474e08cf --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/decode_test.go @@ -0,0 +1,143 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package base64 + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + "github.com/google/go-cmp/cmp" + "github.com/matryer/is" +) + +func TestDecodeProcessor_Success(t *testing.T) { + ctx := context.Background() + + testCases := []struct { + name string + field string + record opencdc.Record + want sdk.SingleRecord + }{{ + name: "decode raw data", + field: ".Key", + record: opencdc.Record{ + Key: opencdc.RawData("Zm9v"), + }, + want: sdk.SingleRecord{ + Key: opencdc.RawData("foo"), + }, + }, { + name: "decode string", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": "YmFy", + }, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{ + "foo": "bar", + }, + }, + }} + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + proc := NewDecodeProcessor(log.Nop()) + err := proc.Configure(ctx, map[string]string{"field": tc.field}) + is.NoErr(err) + got := proc.Process(ctx, []opencdc.Record{tc.record}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(tc.want, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} + +func TestDecodeProcessor_Fail(t *testing.T) { + ctx := context.Background() + + testCases := []struct { + name string + field string + record opencdc.Record + wantErr error + }{{ + name: "decode structured data", + field: ".Key", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": "bar", + }, + }, + wantErr: cerrors.New("unexpected data type opencdc.StructuredData"), + }, { + name: "decode map", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": map[string]any{ + "bar": "baz", + }, + }, + }, + wantErr: cerrors.New("unexpected data type map[string]interface {}"), + }, { + 
name: "decode int", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": 1, + }, + }, + wantErr: cerrors.New("unexpected data type int"), + }, { + name: "decode float", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": 1.1, + }, + }, + wantErr: cerrors.New("unexpected data type float64"), + }, { + name: "invalid base64 string", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": "bar", + }, + }, + wantErr: cerrors.New("failed to decode the value: illegal base64 data at input byte 0"), + }} + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + proc := NewDecodeProcessor(log.Nop()) + err := proc.Configure(ctx, map[string]string{"field": tc.field}) + is.NoErr(err) + got := proc.Process(ctx, []opencdc.Record{tc.record}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(sdk.ErrorRecord{Error: tc.wantErr}, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/base64/encode.go b/pkg/plugin/processor/builtin/impl/base64/encode.go new file mode 100644 index 000000000..8861696b4 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/encode.go @@ -0,0 +1,123 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:generate paramgen -output=encode_paramgen.go encodeConfig + +package base64 + +import ( + "context" + "encoding/base64" + "fmt" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" +) + +type encodeProcessor struct { + sdk.UnimplementedProcessor + + config encodeConfig + referenceResolver sdk.ReferenceResolver +} + +type encodeConfig struct { + // Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`). + // Note that it is not allowed to base64 encode the `.Position` field. + Field string `json:"field" validate:"required,exclusion=.Position"` +} + +func NewEncodeProcessor(log.CtxLogger) sdk.Processor { + return &encodeProcessor{} +} + +func (p *encodeProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "base64.encode", + Summary: "Encode a field to base64.", + Description: `The processor will encode the value of the target field to base64 and store the +result in the target field. It is not allowed to encode the ` + "`.Position`" + ` field. 
+If the provided field doesn't exist, the processor will create that field and +assign its value.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: encodeConfig{}.Parameters(), + }, nil +} + +func (p *encodeProcessor) Configure(ctx context.Context, m map[string]string) error { + err := sdk.ParseConfig(ctx, m, &p.config, p.config.Parameters()) + if err != nil { + return cerrors.Errorf("failed to parse configuration: %w", err) + } + + resolver, err := sdk.NewReferenceResolver(p.config.Field) + if err != nil { + return cerrors.Errorf(`failed to parse the "field" parameter: %w`, err) + } + p.referenceResolver = resolver + + return nil +} + +func (p *encodeProcessor) Open(context.Context) error { return nil } + +func (p *encodeProcessor) Process(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + rec, err := p.base64Encode(rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + out = append(out, rec) + } + return out +} + +func (p *encodeProcessor) base64Encode(rec opencdc.Record) (sdk.ProcessedRecord, error) { + ref, err := p.referenceResolver.Resolve(&rec) + if err != nil { + return nil, cerrors.Errorf("failed to resolve the field: %w", err) + } + + var raw []byte + switch val := ref.Get().(type) { + case []byte: + raw = val + case opencdc.RawData: + raw = val + case string: + raw = []byte(val) + case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64, bool: + raw = []byte(fmt.Sprintf("%v", val)) + case nil: + return sdk.SingleRecord(rec), nil + default: + return nil, cerrors.Errorf("unexpected data type %T", val) + } + + encoded := make([]byte, base64.StdEncoding.EncodedLen(len(raw))) + base64.StdEncoding.Encode(encoded, raw) + + err = ref.Set(string(encoded)) + if err != nil { + return nil, cerrors.Errorf("failed to set the encoded value into the record: %w", err) + } + + return 
sdk.SingleRecord(rec), nil +} + +func (p *encodeProcessor) Teardown(context.Context) error { return nil } diff --git a/pkg/plugin/processor/builtin/impl/base64/encode_examples_test.go b/pkg/plugin/processor/builtin/impl/base64/encode_examples_test.go new file mode 100644 index 000000000..4dc073dd4 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/encode_examples_test.go @@ -0,0 +1,134 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+
+package base64
+
+import (
+	"github.com/conduitio/conduit-commons/opencdc"
+	sdk "github.com/conduitio/conduit-processor-sdk"
+	"github.com/conduitio/conduit/pkg/foundation/log"
+	"github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil"
+)
+
+//nolint:govet // a more descriptive example description
+func ExampleEncodeProcessor_rawData() {
+	p := NewEncodeProcessor(log.Nop())
+	exampleutil.RunExample(p, exampleutil.Example{
+		Summary: "Encode record key to base64",
+		Description: `This example takes a record containing raw data in
+` + ".Key" + ` and converts it into a base64 encoded string.`,
+		Config: map[string]string{
+			"field": ".Key",
+		},
+		Have: opencdc.Record{
+			Position:  opencdc.Position("test-position"),
+			Operation: opencdc.OperationCreate,
+			Metadata:  map[string]string{"key1": "val1"},
+			Key:       opencdc.RawData("test-key"),
+			Payload: opencdc.Change{
+				After: opencdc.StructuredData{
+					"foo": "bar",
+				},
+			},
+		},
+		Want: sdk.SingleRecord{
+			Position:  opencdc.Position("test-position"),
+			Operation: opencdc.OperationCreate,
+			Metadata:  map[string]string{"key1": "val1"},
+			Key:       opencdc.RawData("dGVzdC1rZXk="),
+			Payload: opencdc.Change{
+				After: opencdc.StructuredData{
+					"foo": "bar",
+				},
+			},
+		},
+	})
+
+	// Output:
+	// processor transformed record:
+	// --- before
+	// +++ after
+	// @@ -1,14 +1,14 @@
+	// {
+	//   "position": "dGVzdC1wb3NpdGlvbg==",
+	//   "operation": "create",
+	//   "metadata": {
+	//     "key1": "val1"
+	//   },
+	// -  "key": "test-key",
+	// +  "key": "dGVzdC1rZXk=",
+	//   "payload": {
+	//     "before": null,
+	//     "after": {
+	//       "foo": "bar"
+	//     }
+	//   }
+	// }
+}
+
+//nolint:govet // a more descriptive example description
+func ExampleEncodeProcessor_stringField() {
+	p := NewEncodeProcessor(log.Nop())
+	exampleutil.RunExample(p, exampleutil.Example{
+		Summary: "Encode nested value to base64",
+		Description: `This example takes a record containing a string in
+` + ".Payload.After.foo" + ` and converts it into 
a base64 encoded string.`, + Config: map[string]string{ + "field": ".Payload.After.foo", + }, + Have: opencdc.Record{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Key: opencdc.RawData("test-key"), + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": "bar", + }, + }, + }, + Want: sdk.SingleRecord{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Key: opencdc.RawData("test-key"), + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": "YmFy", + }, + }, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,14 +1,14 @@ + // { + // "position": "dGVzdC1wb3NpdGlvbg==", + // "operation": "create", + // "metadata": { + // "key1": "val1" + // }, + // "key": "test-key", + // "payload": { + // "before": null, + // "after": { + // - "foo": "bar" + // + "foo": "YmFy" + // } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/base64/encode_paramgen.go b/pkg/plugin/processor/builtin/impl/base64/encode_paramgen.go new file mode 100644 index 000000000..603796ce9 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/encode_paramgen.go @@ -0,0 +1,22 @@ +// Code generated by paramgen. DO NOT EDIT. +// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package base64 + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (encodeConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: "", + Description: "Field is the target field, as it would be addressed in a Go template (e.g. 
`.Payload.After.foo`).\nNote that it is not allowed to base64 encode the `.Position` field.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + config.ValidationExclusion{List: []string{".Position"}}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/base64/encode_test.go b/pkg/plugin/processor/builtin/impl/base64/encode_test.go new file mode 100644 index 000000000..e5fd37a8f --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/encode_test.go @@ -0,0 +1,173 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package base64 + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + "github.com/google/go-cmp/cmp" + "github.com/matryer/is" +) + +func TestEncodeProcessor_Success(t *testing.T) { + ctx := context.Background() + + testCases := []struct { + name string + field string + record opencdc.Record + want sdk.SingleRecord + }{{ + name: "encode raw data", + field: ".Key", + record: opencdc.Record{ + Key: opencdc.RawData("foo"), + }, + want: sdk.SingleRecord{ + Key: opencdc.RawData("Zm9v"), + }, + }, { + name: "encode string", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": "bar", + }, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{ + "foo": "YmFy", + }, + }, + }, { + name: "encode int", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": 1, + }, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{ + "foo": "MQ==", + }, + }, + }, { + name: "encode float", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": 1.1, + }, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{ + "foo": "MS4x", + }, + }, + }, { + name: "encode bool", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": true, + }, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{ + "foo": "dHJ1ZQ==", + }, + }, + }, { + name: "encode []byte", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": []byte("bar"), + }, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{ + "foo": "YmFy", + }, + }, + }, { + name: "encode nil", + field: ".Key", + record: opencdc.Record{}, + want: sdk.SingleRecord{}, + }} + + for _, tc := range testCases { 
+ t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + proc := NewEncodeProcessor(log.Nop()) + err := proc.Configure(ctx, map[string]string{"field": tc.field}) + is.NoErr(err) + got := proc.Process(ctx, []opencdc.Record{tc.record}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(tc.want, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} + +func TestEncodeProcessor_Fail(t *testing.T) { + ctx := context.Background() + + testCases := []struct { + name string + field string + record opencdc.Record + wantErr error + }{{ + name: "encode structured data", + field: ".Key", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": "bar", + }, + }, + wantErr: cerrors.New("unexpected data type opencdc.StructuredData"), + }, { + name: "encode map", + field: ".Key.foo", + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "foo": map[string]any{ + "bar": "baz", + }, + }, + }, + wantErr: cerrors.New("unexpected data type map[string]interface {}"), + }} + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + proc := NewEncodeProcessor(log.Nop()) + err := proc.Configure(ctx, map[string]string{"field": tc.field}) + is.NoErr(err) + got := proc.Process(ctx, []opencdc.Record{tc.record}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(sdk.ErrorRecord{Error: tc.wantErr}, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/base64/examples_exporter_test.go b/pkg/plugin/processor/builtin/impl/base64/examples_exporter_test.go new file mode 100644 index 000000000..7d1b76e71 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/examples_exporter_test.go @@ -0,0 +1,34 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build export_processors + +package base64 + +import ( + "os" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +func TestMain(m *testing.M) { + code := m.Run() + if code > 0 { + os.Exit(code) + } + + // tests passed, export the processors + exampleutil.ExportProcessors() +} diff --git a/pkg/plugin/processor/builtin/impl/base64/examples_test.go b/pkg/plugin/processor/builtin/impl/base64/examples_test.go new file mode 100644 index 000000000..165c16065 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/base64/examples_test.go @@ -0,0 +1,17 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go test -count=1 -tags export_processors . 
+ +package base64 diff --git a/pkg/plugin/processor/builtin/impl/custom/examples_exporter_test.go b/pkg/plugin/processor/builtin/impl/custom/examples_exporter_test.go new file mode 100644 index 000000000..13f0cc783 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/custom/examples_exporter_test.go @@ -0,0 +1,34 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build export_processors + +package custom + +import ( + "os" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +func TestMain(m *testing.M) { + code := m.Run() + if code > 0 { + os.Exit(code) + } + + // tests passed, export the processors + exampleutil.ExportProcessors() +} diff --git a/pkg/plugin/processor/builtin/impl/custom/examples_test.go b/pkg/plugin/processor/builtin/impl/custom/examples_test.go new file mode 100644 index 000000000..7fa68d704 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/custom/examples_test.go @@ -0,0 +1,17 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go test -count=1 -tags export_processors . + +package custom diff --git a/pkg/plugin/processor/builtin/impl/custom/javascript.go b/pkg/plugin/processor/builtin/impl/custom/javascript.go new file mode 100644 index 000000000..fe796c30d --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/custom/javascript.go @@ -0,0 +1,323 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=javascript_paramgen.go javascriptConfig + +package custom + +import ( + "context" + "os" + "sync" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/dop251/goja" + "github.com/dop251/goja_nodejs/require" +) + +const entrypoint = "process" + +// jsRecord is an intermediary representation of opencdc.Record that is passed to +// the JavaScript transform. 
We use this because using opencdc.Record would not +// allow us to modify or access certain data (e.g. metadata or structured data). +type jsRecord struct { + Position []byte + Operation string + Metadata map[string]string + Key any + Payload struct { + Before any + After any + } +} + +// gojaContext represents one independent goja context. +type gojaContext struct { + runtime *goja.Runtime + function goja.Callable +} + +type javascriptConfig struct { + // JavaScript code for this processor. + // It needs to have a function `process()` that accept + // a record and returns a record. + // The `process()` function can either modify the input record and return it, + // or create a new record. + // If a record needs to be filtered (dropped from the pipeline), + // then the `process()` function should return a `null`. + Script string `json:"script"` + // The path to a .js file containing the processor code. + ScriptPath string `json:"script.path"` +} + +type javascriptProcessor struct { + sdk.UnimplementedProcessor + + // src is the JavaScript code that will be executed + src string + + gojaPool sync.Pool + logger log.CtxLogger +} + +func NewJavascriptProcessor(logger log.CtxLogger) sdk.Processor { + return &javascriptProcessor{logger: logger} +} + +func (p *javascriptProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "custom.javascript", + Summary: "JavaScript processor", + Description: `A processor that makes it possible to process Conduit records using JavaScript. + +The following helper functions and fields are available: +* logger: a logger that outputs to Conduit's logs. Check zerolog's API on how to use it. +* Record(): constructs a new record which represents a successful processing result. +It's analogous to sdk.SingleRecord from Conduit's Go processor SDK. +* RawData(): creates a raw data object. It's analogous to opencdc.RawData. 
Optionally, it +accepts a string argument, which will be cast into a byte array, for example: record.Key = RawData("new key"). +* StructuredData(): creates a structured data (map-like) object. + +To find out what's possible with the JS processors, also refer to the documentation for +[goja](https://github.com/dop251/goja), which is the JavaScript engine we use.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: javascriptConfig{}.Parameters(), + }, nil +} + +func (p *javascriptProcessor) Configure(ctx context.Context, m map[string]string) error { + cfg := javascriptConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, cfg.Parameters()) + if err != nil { + return cerrors.Errorf("failed parsing configuration: %w", err) + } + + switch { + case cfg.Script != "" && cfg.ScriptPath != "": + return cerrors.New("only one of: [script, script.path] should be provided") + case cfg.Script != "": + p.src = cfg.Script + case cfg.ScriptPath != "": + file, err := os.ReadFile(cfg.ScriptPath) + if err != nil { + return cerrors.Errorf("error reading script from path %v: %w", cfg.ScriptPath, err) + } + p.src = string(file) + default: + return cerrors.New("one of: [script, script.path] needs to be provided") + } + + return nil +} + +func (p *javascriptProcessor) Open(context.Context) error { + runtime, err := p.newRuntime(p.logger) + if err != nil { + return cerrors.Errorf("failed initializing JS runtime: %w", err) + } + + _, err = p.newFunction(runtime, p.src) + if err != nil { + return cerrors.Errorf("failed initializing JS function: %w", err) + } + + p.gojaPool.New = func() any { + // create a new runtime for the function, so it's executed in a separate goja context + rt, _ := p.newRuntime(p.logger) + f, _ := p.newFunction(rt, p.src) + return &gojaContext{ + runtime: rt, + function: f, + } + } + + return nil +} + +func (p *javascriptProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + g := p.gojaPool.Get().(*gojaContext) + defer 
p.gojaPool.Put(g) + + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + jsRecs := p.toJSRecord(g.runtime, rec) + result, err := g.function(goja.Undefined(), jsRecs) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + + proc, err := p.toSDKRecords(result) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + + out = append(out, proc) + } + + return out +} + +func (p *javascriptProcessor) Teardown(context.Context) error { + return nil +} + +func (p *javascriptProcessor) newRuntime(logger log.CtxLogger) (*goja.Runtime, error) { + rt := goja.New() + require.NewRegistry().Enable(rt) + + runtimeHelpers := map[string]interface{}{ + "logger": &logger, + "Record": p.newSingleRecord(rt), + "RawData": p.jsContentRaw(rt), + "StructuredData": p.jsContentStructured(rt), + } + + for name, helper := range runtimeHelpers { + if err := rt.Set(name, helper); err != nil { + return nil, cerrors.Errorf("failed to set helper %q: %w", name, err) + } + } + + return rt, nil +} + +func (p *javascriptProcessor) newFunction(runtime *goja.Runtime, src string) (goja.Callable, error) { + prg, err := goja.Compile("", src, false) + if err != nil { + return nil, cerrors.Errorf("failed to compile script: %w", err) + } + + _, err = runtime.RunProgram(prg) + if err != nil { + return nil, cerrors.Errorf("failed to run program: %w", err) + } + + tmp := runtime.Get(entrypoint) + entrypointFunc, ok := goja.AssertFunction(tmp) + if !ok { + return nil, cerrors.Errorf("failed to get entrypoint function %q", entrypoint) + } + + return entrypointFunc, nil +} + +func (p *javascriptProcessor) newSingleRecord(runtime *goja.Runtime) func(goja.ConstructorCall) *goja.Object { + return func(call goja.ConstructorCall) *goja.Object { + // We return a singleRecord struct, however because we are + // not changing call.This instanceof will not work as expected. 
+ + // JavaScript records are always initialized with metadata + // so that it's easier to write processor code + // (without worrying about initializing it every time) + r := jsRecord{ + Metadata: make(map[string]string), + } + // We need to return a pointer to make the returned object mutable. + return runtime.ToValue(&r).ToObject(runtime) + } +} + +func (p *javascriptProcessor) jsContentRaw(runtime *goja.Runtime) func(goja.ConstructorCall) *goja.Object { + return func(call goja.ConstructorCall) *goja.Object { + var r opencdc.RawData + if len(call.Arguments) > 0 { + r = opencdc.RawData(call.Argument(0).String()) + } + // We need to return a pointer to make the returned object mutable. + return runtime.ToValue(&r).ToObject(runtime) + } +} + +func (p *javascriptProcessor) jsContentStructured(runtime *goja.Runtime) func(goja.ConstructorCall) *goja.Object { + return func(call goja.ConstructorCall) *goja.Object { + // TODO accept arguments + // We return a map[string]interface{} struct, however because we are + // not changing call.This instanceof will not work as expected. 
+ + r := make(map[string]interface{}) + return runtime.ToValue(r).ToObject(runtime) + } +} + +func (p *javascriptProcessor) toJSRecord(runtime *goja.Runtime, r opencdc.Record) goja.Value { + convertData := func(d opencdc.Data) interface{} { + switch v := d.(type) { + case opencdc.RawData: + return &v + case opencdc.StructuredData: + return map[string]interface{}(v) + } + return nil + } + + jsRec := &jsRecord{ + Position: r.Position, + Operation: r.Operation.String(), + Metadata: r.Metadata, + Key: convertData(r.Key), + Payload: struct { + Before interface{} + After interface{} + }{ + Before: convertData(r.Payload.Before), + After: convertData(r.Payload.After), + }, + } + + // we need to send in a pointer to let the user change the value and return it, if they choose to do so + return runtime.ToValue(jsRec) +} + +func (p *javascriptProcessor) toSDKRecords(v goja.Value) (sdk.ProcessedRecord, error) { + raw := v.Export() + if raw == nil { + return sdk.FilterRecord{}, nil + } + + jsr, ok := v.Export().(*jsRecord) + if !ok { + return nil, cerrors.Errorf("js function expected to return %T, but returned: %T", &jsRecord{}, v) + } + + var op opencdc.Operation + err := op.UnmarshalText([]byte(jsr.Operation)) + if err != nil { + return nil, cerrors.Errorf("could not unmarshal operation: %w", err) + } + + convertData := func(d interface{}) opencdc.Data { + switch v := d.(type) { + case *opencdc.RawData: + return *v + case map[string]interface{}: + return opencdc.StructuredData(v) + } + return nil + } + + return sdk.SingleRecord{ + Position: jsr.Position, + Operation: op, + Metadata: jsr.Metadata, + Key: convertData(jsr.Key), + Payload: opencdc.Change{ + Before: convertData(jsr.Payload.Before), + After: convertData(jsr.Payload.After), + }, + }, nil +} diff --git a/pkg/plugin/processor/builtin/impl/custom/javascript_examples_test.go b/pkg/plugin/processor/builtin/impl/custom/javascript_examples_test.go new file mode 100644 index 000000000..db9c31fd8 --- /dev/null +++ 
b/pkg/plugin/processor/builtin/impl/custom/javascript_examples_test.go @@ -0,0 +1,79 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package custom + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // a more descriptive example description +func ExampleJavascriptProcessor() { + p := NewJavascriptProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Modify a record's metadata and payload using JavaScript", + Description: "In this example we use the `custom.javascript` processor to add a metadata key " + + "to the input record. 
It also prepends \"hello, \" to `.Payload.After`.", + Config: map[string]string{ + "script": `function process(rec) { + rec.Metadata["processed"] = "true"; + let existing = String.fromCharCode.apply(String, rec.Payload.After); + rec.Payload.After = RawData("hello, " + existing); + return rec; +}`, + }, + Have: opencdc.Record{ + Metadata: map[string]string{ + "existing-key": "existing-value", + }, + Payload: opencdc.Change{ + After: opencdc.RawData("world"), + }, + }, + Want: sdk.SingleRecord{ + Metadata: map[string]string{ + "existing-key": "existing-value", + "processed": "true", + }, + Payload: opencdc.Change{ + After: opencdc.RawData("hello, world"), + }, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,12 +1,13 @@ + // { + // "position": null, + // "operation": "Operation(0)", + // "metadata": { + // - "existing-key": "existing-value" + // + "existing-key": "existing-value", + // + "processed": "true" + // }, + // "key": null, + // "payload": { + // "before": null, + // - "after": "world" + // + "after": "hello, world" + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/custom/javascript_paramgen.go b/pkg/plugin/processor/builtin/impl/custom/javascript_paramgen.go new file mode 100644 index 000000000..65e7eb6b9 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/custom/javascript_paramgen.go @@ -0,0 +1,25 @@ +// Code generated by paramgen. DO NOT EDIT. 
+// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package custom + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (javascriptConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "script": { + Default: "", + Description: "JavaScript code for this processor.\nIt needs to have a function `process()` that accept\na record and returns a record.\nThe `process()` function can either modify the input record and return it,\nor create a new record.\nIf a record needs to be filtered (dropped from the pipeline),\nthen the `process()` function should return a `null`.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "script.path": { + Default: "", + Description: "The path to a .js file containing the processor code.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/custom/javascript_test.go b/pkg/plugin/processor/builtin/impl/custom/javascript_test.go new file mode 100644 index 000000000..5350b94a7 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/custom/javascript_test.go @@ -0,0 +1,475 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package custom + +import ( + "bytes" + "context" + "strings" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/dop251/goja" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/matryer/is" + "github.com/rs/zerolog" +) + +func TestJSProcessor_Logger(t *testing.T) { + is := is.New(t) + ctx := context.Background() + + var buf bytes.Buffer + logger := log.New(zerolog.New(&buf)) + underTest := NewJavascriptProcessor(logger) + err := underTest.Configure( + ctx, + map[string]string{ + "script": ` + function process(r) { + logger.Info().Msg("Hello"); + return r + } + `, + }, + ) + is.NoErr(err) + + err = underTest.Open(ctx) + is.NoErr(err) + + _ = underTest.Process(context.Background(), []opencdc.Record{{}}) + + is.Equal(`{"level":"info","message":"Hello"}`+"\n", buf.String()) // expected different log message +} + +func TestJSProcessor_MissingEntrypoint(t *testing.T) { + is := is.New(t) + ctx := context.Background() + + underTest := NewJavascriptProcessor(log.Nop()) + err := underTest.Configure( + ctx, + map[string]string{"script": `function something() { logger.Debug("no entrypoint"); }`}, + ) + is.NoErr(err) + + err = underTest.Open(ctx) + is.True(err != nil) // expected error + is.Equal( + `failed initializing JS function: failed to get entrypoint function "process"`, + err.Error(), + ) // expected different error message +} + +func TestJSProcessor_Error(t *testing.T) { + is := is.New(t) + underTest := newTestJavaScriptProc( + t, + `function process(r) { + throw new Error('something bad happened'); + }`, + ) + + got := underTest.Process(context.Background(), []opencdc.Record{{}}) + is.Equal(1, len(got)) + gotErr, ok := got[0].(sdk.ErrorRecord) + is.True(ok) + is.True(strings.Contains(gotErr.Error.Error(), "something bad happened")) +} + +func 
TestJSProcessor_Process(t *testing.T) { + tests := []struct { + name string + script string + args []opencdc.Record + want []sdk.ProcessedRecord + }{ + { + name: "change fields of structured record", + script: ` + function process(rec) { + rec.Operation = "update"; + rec.Metadata["returned"] = "JS"; + rec.Key = RawData("baz"); + rec.Payload.After["ccc"] = "baz"; + return rec; + }`, + args: []opencdc.Record{ + { + Position: []byte("2"), + Operation: opencdc.OperationCreate, + Metadata: opencdc.Metadata{"existing": "val"}, + Key: opencdc.RawData("bar"), + Payload: opencdc.Change{ + After: opencdc.StructuredData( + map[string]interface{}{ + "aaa": 111, + "bbb": []string{"foo", "bar"}, + }, + ), + }, + }, + }, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Position: []byte("2"), + Operation: opencdc.OperationUpdate, + Metadata: opencdc.Metadata{"existing": "val", "returned": "JS"}, + Key: opencdc.RawData("baz"), + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "aaa": 111, + "bbb": []string{"foo", "bar"}, + "ccc": "baz", + }, + }, + }, + }, + }, + { + name: "complete change incoming record with structured data", + script: ` + function process(rec) { + rec.Metadata["returned"] = "JS"; + rec.Key = RawData("baz"); + rec.Payload.After = new StructuredData(); + rec.Payload.After["foo"] = "bar"; + return rec; + }`, + args: []opencdc.Record{ + { + Position: []byte("2"), + Metadata: opencdc.Metadata{"existing": "val"}, + Key: opencdc.RawData("bar"), + Payload: opencdc.Change{ + After: opencdc.RawData("foo"), + }, + }, + }, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Position: []byte("2"), + Metadata: opencdc.Metadata{"existing": "val", "returned": "JS"}, + Key: opencdc.RawData("baz"), + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": "bar", + }, + }, + }, + }, + }, + { + name: "complete change incoming record with raw data", + script: ` + function process(rec) { + rec.Metadata["returned"] = "JS"; + rec.Key = RawData("baz"); + 
rec.Payload.After = RawData(String.fromCharCode.apply(String, rec.Payload.After) + "bar"); + return rec; + }`, + args: []opencdc.Record{ + { + Position: []byte("3"), + Metadata: opencdc.Metadata{"existing": "val"}, + Key: opencdc.RawData("bar"), + Payload: opencdc.Change{ + After: opencdc.RawData("foo"), + }, + }, + }, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Position: []byte("3"), + Metadata: opencdc.Metadata{"existing": "val", "returned": "JS"}, + Key: opencdc.RawData("baz"), + Payload: opencdc.Change{ + After: opencdc.RawData("foobar"), + }, + }, + }, + }, + { + name: "return new SingleRecord with raw data", + script: ` + function process(record) { + r = new Record(); + r.Position = "3" + r.Metadata["returned"] = "JS"; + r.Key = new RawData("baz"); + r.Payload.After = new RawData("foobar"); + return r; + }`, + args: []opencdc.Record{{Position: opencdc.Position("3")}}, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Position: []byte("3"), + Metadata: opencdc.Metadata{"returned": "JS"}, + Key: opencdc.RawData("baz"), + Payload: opencdc.Change{ + After: opencdc.RawData("foobar"), + }, + }, + }, + }, + { + name: "use empty raw data", + script: ` + function process(record) { + r = new Record(); + r.Position = "3"; + r.Payload.After = new RawData("foobar"); + return r; + }`, + args: []opencdc.Record{{Position: opencdc.Position("3")}}, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Position: []byte("3"), + // JavaScript records are always initialized with metadata + Metadata: opencdc.Metadata{}, + Payload: opencdc.Change{ + After: opencdc.RawData("foobar"), + }, + }, + }, + }, + { + name: "filter: always skip", + script: `function process(r) { + return null; + }`, + args: []opencdc.Record{{}}, + want: []sdk.ProcessedRecord{sdk.FilterRecord{}}, + }, + { + name: "filter: matching", + script: `function process(r) { + if (r.Metadata["keepme"] != undefined) { + return r + } + return null; + }`, + args: []opencdc.Record{{Metadata: 
opencdc.Metadata{"keepme": "yes"}}}, + want: []sdk.ProcessedRecord{sdk.SingleRecord{Metadata: opencdc.Metadata{"keepme": "yes"}}}, + }, + { + name: "filter: not matching", + script: `function process(r) { + if (r.Metadata["keepme"] != undefined) { + return r + } + return null; + }`, + args: []opencdc.Record{{Metadata: opencdc.Metadata{"foo": "bar"}}}, + want: []sdk.ProcessedRecord{sdk.FilterRecord{}}, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + underTest := newTestJavaScriptProc(t, tc.script) + + got := underTest.Process(context.Background(), tc.args) + diff := cmp.Diff(tc.want, got, cmpopts.IgnoreUnexported(sdk.SingleRecord{})) + if diff != "" { + t.Errorf("mismatch (-want +got):\n%s", diff) + } + }) + } +} + +func TestJSProcessor_DataTypes(t *testing.T) { + testCases := []struct { + name string + src string + input []opencdc.Record + want []sdk.ProcessedRecord + }{ + { + name: "position from string", + src: `function process(rec) { + rec.Position = "foobar"; + return rec; + }`, + input: []opencdc.Record{{}}, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Position: opencdc.Position("foobar"), + }, + }, + }, + { + name: "raw payload, data from string", + src: `function process(rec) { + rec.Payload.After = new RawData("foobar"); + return rec; + }`, + input: []opencdc.Record{{}}, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.RawData("foobar"), + }, + }, + }, + }, + { + name: "raw key, data from string", + src: `function process(rec) { + rec.Key = new RawData("foobar"); + return rec; + }`, + input: []opencdc.Record{{}}, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Key: opencdc.RawData("foobar"), + }, + }, + }, + { + name: "update metadata", + src: `function process(rec) { + rec.Metadata["new_key"] = "new_value" + delete rec.Metadata.remove_me; + return rec; + }`, + input: []opencdc.Record{{ + Metadata: opencdc.Metadata{ + "old_key": "old_value", + "remove_me": 
"remove_me", + }, + }}, + want: []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Metadata: opencdc.Metadata{ + "old_key": "old_value", + "new_key": "new_value", + }, + }, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + underTest := newTestJavaScriptProc(t, tc.src) + + got := underTest.Process(context.Background(), tc.input) + is.Equal(tc.want, got) // expected different record + }) + } +} + +func TestJSProcessor_JavaScriptException(t *testing.T) { + is := is.New(t) + + src := `function process(record) { + var m; + m.test + }` + underTest := newTestJavaScriptProc(t, src) + + r := []opencdc.Record{{ + Key: opencdc.RawData("test key"), + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData("test payload"), + }, + }} + got := underTest.Process(context.Background(), r) + errRec, isErrRec := got[0].(sdk.ErrorRecord) + is.True(isErrRec) // expected error + target := &goja.Exception{} + is.True(cerrors.As(errRec.Error, &target)) // expected a goja.Exception +} + +func TestJSProcessor_BrokenJSCode(t *testing.T) { + is := is.New(t) + ctx := context.Background() + src := `function {` + + p := NewJavascriptProcessor(log.Test(t)) + err := p.Configure( + ctx, + map[string]string{ + "script": src, + }, + ) + is.NoErr(err) // expected no error when configuration the JS processor + + err = p.Open(ctx) + is.True(err != nil) // expected error for invalid JS code + target := &goja.CompilerSyntaxError{} + is.True(cerrors.As(err, &target)) // expected a goja.CompilerSyntaxError +} + +func TestJSProcessor_ScriptWithMultipleFunctions(t *testing.T) { + is := is.New(t) + + src := ` + function getValue() { + return "updated_value"; + } + + function process(rec) { + rec.Metadata["updated_key"] = getValue() + return rec; + } + ` + underTest := newTestJavaScriptProc(t, src) + + r := []opencdc.Record{{ + Metadata: opencdc.Metadata{ + "old_key": "old_value", + }, + }} + + got := underTest.Process(context.Background(), r) + rec, ok 
:= got[0].(sdk.SingleRecord) + is.True(ok) // expected a processed record + is.Equal( + sdk.SingleRecord{ + Metadata: opencdc.Metadata{ + "old_key": "old_value", + "updated_key": "updated_value", + }, + }, + rec, + ) // expected different record +} + +func newTestJavaScriptProc(t *testing.T, src string) sdk.Processor { + is := is.New(t) + ctx := context.Background() + + p := NewJavascriptProcessor(log.Test(t)) + err := p.Configure( + ctx, + map[string]string{ + "script": src, + }, + ) + is.NoErr(err) // expected no error when configuring the JS processor + err = p.Open(ctx) + is.NoErr(err) // expected no error when opening the JS processor + + return p +} diff --git a/pkg/plugin/processor/builtin/impl/examples_exporter_test.go b/pkg/plugin/processor/builtin/impl/examples_exporter_test.go new file mode 100644 index 000000000..20eacdfa5 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/examples_exporter_test.go @@ -0,0 +1,34 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build export_processors + +package impl + +import ( + "os" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +func TestMain(m *testing.M) { + code := m.Run() + if code > 0 { + os.Exit(code) + } + + // tests passed, export the processors + exampleutil.ExportProcessors() +} diff --git a/pkg/plugin/processor/builtin/impl/examples_test.go b/pkg/plugin/processor/builtin/impl/examples_test.go new file mode 100644 index 000000000..1f6965d0b --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/examples_test.go @@ -0,0 +1,17 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go test -count=1 -tags export_processors . + +package impl diff --git a/pkg/plugin/processor/builtin/impl/field/convert.go b/pkg/plugin/processor/builtin/impl/field/convert.go new file mode 100644 index 000000000..7e87fab2c --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/convert.go @@ -0,0 +1,158 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=convert_paramgen.go convertConfig + +package field + +import ( + "context" + "fmt" + "strconv" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" +) + +type convertProcessor struct { + referenceResolver sdk.ReferenceResolver + config convertConfig + + sdk.UnimplementedProcessor +} + +func NewConvertProcessor(log.CtxLogger) sdk.Processor { + return &convertProcessor{} +} + +type convertConfig struct { + // Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`). + // you can only convert fields that are under .Key and .Payload, and said fields should contain structured data. + Field string `json:"field" validate:"required,regex=^\\.(Payload|Key).*"` + // Type is the target field type after conversion, available options are: string, int, float, bool. + Type string `json:"type" validate:"required,inclusion=string|int|float|bool"` +} + +func (p *convertProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "field.convert", + Summary: "Convert the type of a field.", + Description: `Convert takes the field of one type and converts it into another type (e.g. string to integer). +The applicable types are string, int, float and bool. Converting can be done between any combination of types. Note that +booleans will be converted to numeric values 1 (true) and 0 (false). 
Processor is only applicable to .Key, .Payload.Before +and .Payload.After prefixes, and only applicable if said fields contain structured data. +If the record contains raw JSON data, then use the processor [` + "json.decode" + `](/docs/processors/builtin/json.decode) +to parse it into structured data first.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: convertConfig{}.Parameters(), + }, nil +} + +func (p *convertProcessor) Configure(ctx context.Context, m map[string]string) error { + err := sdk.ParseConfig(ctx, m, &p.config, convertConfig{}.Parameters()) + if err != nil { + return cerrors.Errorf("failed to parse configuration: %w", err) + } + + resolver, err := sdk.NewReferenceResolver(p.config.Field) + if err != nil { + return cerrors.Errorf("failed to parse the %q param: %w", "field", err) + } + p.referenceResolver = resolver + return nil +} + +func (p *convertProcessor) Open(context.Context) error { + return nil +} + +func (p *convertProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, record := range records { + rec := record + ref, err := p.referenceResolver.Resolve(&rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + newVal, err := p.stringToType(p.toString(ref.Get()), p.config.Type) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + err = ref.Set(newVal) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + out = append(out, sdk.SingleRecord(rec)) + } + return out +} + +func (p *convertProcessor) stringToType(value, typ string) (any, error) { + switch typ { + case "string": + return value, nil + case "int": + newVal, err := strconv.Atoi(value) + if err != nil { + return nil, err + } + return newVal, nil + case "float": + newVal, err := strconv.ParseFloat(value, 64) + if err != nil { + return nil, err + } + return newVal, nil + case "bool": + newVal, err := 
strconv.ParseBool(value) + if err != nil { + return nil, err + } + return newVal, nil + default: + return nil, cerrors.Errorf("undefined type %q", typ) + } +} + +func (p *convertProcessor) toString(value any) string { + switch v := value.(type) { + case string: + return v + case int: + return strconv.Itoa(v) + case float64: + return strconv.FormatFloat(v, 'f', -1, 64) + case bool: + if p.config.Type == "int" || p.config.Type == "float" { + return p.boolToStringNumber(v) + } + return strconv.FormatBool(v) + default: + return fmt.Sprintf("%v", value) + } +} + +func (p *convertProcessor) boolToStringNumber(b bool) string { + if b { + return "1" + } + return "0" +} + +func (p *convertProcessor) Teardown(context.Context) error { + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/field/convert_examples_test.go b/pkg/plugin/processor/builtin/impl/field/convert_examples_test.go new file mode 100644 index 000000000..eae6beb9c --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/convert_examples_test.go @@ -0,0 +1,145 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package field + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // a more descriptive example description +func ExampleConvertProcessor_stringToInt() { + p := NewConvertProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Convert `string` to `int`", + Description: "This example takes the string in field `.Key.id` and changes its data type to `int`.", + Config: map[string]string{"field": ".Key.id", "type": "int"}, + Have: opencdc.Record{ + Operation: opencdc.OperationUpdate, + Key: opencdc.StructuredData{"id": "123"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar"}}, + }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationUpdate, + Key: opencdc.StructuredData{"id": 123}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar"}}, + }}) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,14 +1,14 @@ + // { + // "position": null, + // "operation": "update", + // "metadata": null, + // "key": { + // - "id": "123" + // + "id": 123 + // }, + // "payload": { + // "before": null, + // "after": { + // "foo": "bar" + // } + // } + // } +} + +//nolint:govet // a more descriptive example description +func ExampleConvertProcessor_intToBool() { + p := NewConvertProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Convert `int` to `bool`", + Description: "This example takes the `int` in field `.Payload.After.done` and changes its data type to `bool`.", + Config: map[string]string{"field": ".Payload.After.done", "type": "bool"}, + Have: opencdc.Record{ + Operation: opencdc.OperationUpdate, + Key: opencdc.StructuredData{"id": "123"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"done": 1}}, + }, + Want: 
sdk.SingleRecord{ + Operation: opencdc.OperationUpdate, + Key: opencdc.StructuredData{"id": "123"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"done": true}}, + }}) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,14 +1,14 @@ + // { + // "position": null, + // "operation": "update", + // "metadata": null, + // "key": { + // "id": "123" + // }, + // "payload": { + // "before": null, + // "after": { + // - "done": 1 + // + "done": true + // } + // } + // } +} + +//nolint:govet // a more descriptive example description +func ExampleConvertProcessor_floatToString() { + p := NewConvertProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Convert `float` to `string`", + Description: "This example takes the `float` in field `.Key.id` and changes its data type to `string`.", + Config: map[string]string{"field": ".Key.id", "type": "string"}, + Have: opencdc.Record{ + Operation: opencdc.OperationUpdate, + Key: opencdc.StructuredData{"id": 123.345}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar"}}, + }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationUpdate, + Key: opencdc.StructuredData{"id": "123.345"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar"}}, + }}) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,14 +1,14 @@ + // { + // "position": null, + // "operation": "update", + // "metadata": null, + // "key": { + // - "id": 123.345 + // + "id": "123.345" + // }, + // "payload": { + // "before": null, + // "after": { + // "foo": "bar" + // } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/field/convert_paramgen.go b/pkg/plugin/processor/builtin/impl/field/convert_paramgen.go new file mode 100644 index 000000000..af5d23f13 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/convert_paramgen.go @@ -0,0 +1,33 @@ +// Code generated by paramgen. DO NOT EDIT. 
+// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package field + +import ( + "regexp" + + "github.com/conduitio/conduit-commons/config" +) + +func (convertConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: "", + Description: "Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`).\nyou can only convert fields that are under .Key and .Payload, and said fields should contain structured data.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + config.ValidationRegex{Regex: regexp.MustCompile("^\\.(Payload|Key).*")}, + }, + }, + "type": { + Default: "", + Description: "Type is the target field type after conversion, available options are: string, int, float, bool.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + config.ValidationInclusion{List: []string{"string", "int", "float", "bool"}}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/field/convert_test.go b/pkg/plugin/processor/builtin/impl/field/convert_test.go new file mode 100644 index 000000000..671739277 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/convert_test.go @@ -0,0 +1,310 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package field + +import ( + "context" + "strings" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/matryer/is" +) + +func TestConvertField_Process(t *testing.T) { + proc := NewConvertProcessor(log.Nop()).(*convertProcessor) + ctx := context.Background() + var err error + testCases := []struct { + name string + field string + typ string + record opencdc.Record + want sdk.SingleRecord + }{ + { + name: "string to int", + field: ".Key.id", + typ: "int", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": "54"}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": 54}, + }, + }, { + name: "string to float", + field: ".Key.id", + typ: "float", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": "54"}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": 54.0}, + }, + }, { + name: "string to bool", + field: ".Key.id", + typ: "bool", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": "1"}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": true}, + }, + }, { + name: "string to string", + field: ".Key.id", + typ: "string", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": "54"}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": "54"}, + }, + }, + { + name: "int to int", + field: ".Key.id", + typ: "int", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 54}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": 54}, + }, + }, { + name: "int to float", + field: ".Key.id", + typ: "float", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 54}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": 54.0}, + }, + }, { + name: "int to bool", + field: ".Key.id", + typ: "bool", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 1}, + }, + want: sdk.SingleRecord{ + Key: 
opencdc.StructuredData{"id": true}, + }, + }, { + name: "int to string", + field: ".Key.id", + typ: "string", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 54}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": "54"}, + }, + }, { + name: "float to int", + field: ".Key.id", + typ: "int", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 54.0}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": 54}, + }, + }, { + name: "float to float", + field: ".Key.id", + typ: "float", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 54.0}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": 54.0}, + }, + }, { + name: "float to bool", + field: ".Key.id", + typ: "bool", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 1.0}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": true}, + }, + }, { + name: "float to string", + field: ".Key.id", + typ: "string", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 54.0}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": "54"}, + }, + }, { + name: "bool to int", + field: ".Key.id", + typ: "int", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": true}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": 1}, + }, + }, { + name: "bool to float", + field: ".Key.id", + typ: "float", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": false}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": 0.0}, + }, + }, { + name: "bool to bool", + field: ".Key.id", + typ: "bool", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": true}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": true}, + }, + }, { + name: "bool to string", + field: ".Key.id", + typ: "string", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": false}, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"id": "false"}, + }, + }, + } + for _, tc 
:= range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + proc.config.Type = tc.typ + proc.referenceResolver, err = sdk.NewReferenceResolver(tc.field) + is.NoErr(err) + output := proc.Process(ctx, []opencdc.Record{tc.record}) + is.True(len(output) == 1) + is.Equal(output[0], tc.want) + }) + } +} + +func TestConvertField_ProcessFail(t *testing.T) { + proc := NewConvertProcessor(log.Nop()).(*convertProcessor) + ctx := context.Background() + var err error + testCases := []struct { + name string + field string + typ string + record opencdc.Record + wantErr string + }{ + { + name: "string to int, int out of range", + field: ".Key.id", + typ: "int", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": "9999999999999999999"}, + }, + wantErr: "value out of range", + }, + { + name: "string to int, string is not a valid number", + field: ".Key.id", + typ: "int", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": "nan"}, + }, + wantErr: "invalid syntax", + }, { + name: "float to int, float is out of range", + field: ".Key.id", + typ: "int", + record: opencdc.Record{ + Key: opencdc.StructuredData{"id": 9999999999999999999.0}, + }, + wantErr: "value out of range", + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + proc.config.Type = tc.typ + proc.referenceResolver, err = sdk.NewReferenceResolver(tc.field) + is.NoErr(err) + output := proc.Process(ctx, []opencdc.Record{tc.record}) + is.True(len(output) == 1) + rec, ok := output[0].(sdk.ErrorRecord) + is.True(ok) + is.True(strings.Contains(rec.Error.Error(), tc.wantErr)) + }) + } +} + +func TestConvertField_Configure(t *testing.T) { + proc := NewConvertProcessor(log.Nop()) + ctx := context.Background() + testCases := []struct { + name string + cfg map[string]string + wantErr bool + }{ + { + name: "valid config", + cfg: map[string]string{"field": ".Payload.After.foo", "type": "int"}, + wantErr: false, + }, { + name: "invalid config, contains 
an invalid prefix for the field", + cfg: map[string]string{"field": ".Metadata.foo", "type": "int"}, + wantErr: true, + }, { + name: "invalid config, invalid prefix", + cfg: map[string]string{"field": "aPayload.foo", "type": "int"}, + wantErr: true, + }, { + name: "invalid config, invalid type", + cfg: map[string]string{"field": ".Key.foo", "type": "map"}, + wantErr: true, + }, { + name: "missing param", + cfg: map[string]string{}, + wantErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err := proc.Configure(ctx, tc.cfg) + if tc.wantErr { + is.True(err != nil) + return + } + is.NoErr(err) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/field/examples_exporter_test.go b/pkg/plugin/processor/builtin/impl/field/examples_exporter_test.go new file mode 100644 index 000000000..fd2e894f5 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/examples_exporter_test.go @@ -0,0 +1,34 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build export_processors + +package field + +import ( + "os" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +func TestMain(m *testing.M) { + code := m.Run() + if code > 0 { + os.Exit(code) + } + + // tests passed, export the processors + exampleutil.ExportProcessors() +} diff --git a/pkg/plugin/processor/builtin/impl/field/examples_test.go b/pkg/plugin/processor/builtin/impl/field/examples_test.go new file mode 100644 index 000000000..d3143e94b --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/examples_test.go @@ -0,0 +1,17 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go test -count=1 -tags export_processors . + +package field diff --git a/pkg/plugin/processor/builtin/impl/field/exclude.go b/pkg/plugin/processor/builtin/impl/field/exclude.go new file mode 100644 index 000000000..0ea279dab --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/exclude.go @@ -0,0 +1,105 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=exclude_paramgen.go excludeConfig + +package field + +import ( + "context" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" +) + +type excludeProcessor struct { + config excludeConfig + referenceResolvers []sdk.ReferenceResolver + + sdk.UnimplementedProcessor +} + +func NewExcludeProcessor(log.CtxLogger) sdk.Processor { + return &excludeProcessor{} +} + +type excludeConfig struct { + // Fields is a comma separated list of target fields, as they would be addressed in a Go template (e.g. `.Metadata,.Payload.After.foo`). + Fields []string `json:"fields" validate:"required"` +} + +func (p *excludeProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "field.exclude", + Summary: "Remove a subset of fields from the record.", + Description: `Remove a subset of fields from the record, all the other fields are left untouched. +If a field is excluded that contains nested data, the whole tree will be removed. +It is not allowed to exclude ` + ".Position" + ` or ` + ".Operation" + ` fields. 
+ +Note that this processor only runs on structured data, if the record contains +raw JSON data, then use the processor [` + "json.decode" + `](/docs/processors/builtin/json.decode) +to parse it into structured data first.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: excludeConfig{}.Parameters(), + }, nil +} + +func (p *excludeProcessor) Configure(ctx context.Context, m map[string]string) error { + err := sdk.ParseConfig(ctx, m, &p.config, excludeConfig{}.Parameters()) + if err != nil { + return cerrors.Errorf("failed to parse configuration: %w", err) + } + p.referenceResolvers = make([]sdk.ReferenceResolver, len(p.config.Fields)) + for i, field := range p.config.Fields { + if field == internal.PositionReference || field == internal.OperationReference { + return cerrors.Errorf("it is not allowed to exclude the fields %q and %q", internal.OperationReference, internal.PositionReference) + } + p.referenceResolvers[i], err = sdk.NewReferenceResolver(field) + if err != nil { + return cerrors.Errorf("invalid reference: %w", err) + } + } + return nil +} + +func (p *excludeProcessor) Open(context.Context) error { + return nil +} + +func (p *excludeProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, record := range records { + rec := record + for i := range p.config.Fields { + ref, err := p.referenceResolvers[i].Resolve(&rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + err = ref.Delete() + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + } + out = append(out, sdk.SingleRecord(rec)) + } + return out +} + +func (p *excludeProcessor) Teardown(context.Context) error { + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/field/exclude_examples_test.go b/pkg/plugin/processor/builtin/impl/field/exclude_examples_test.go new file mode 100644 index 000000000..4fef8fc26 --- /dev/null +++ 
b/pkg/plugin/processor/builtin/impl/field/exclude_examples_test.go @@ -0,0 +1,119 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package field + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // a more descriptive example description +func ExampleExcludeProcessor_oneField() { + p := NewExcludeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Exclude all fields in payload", + Description: "Excluding all fields in `.Payload` results in an empty payload.", + Config: map[string]string{"fields": ".Payload"}, + Have: opencdc.Record{ + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar"}, Before: opencdc.StructuredData{"bar": "baz"}}, + }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}}, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,16 +1,12 @@ + // { + // "position": null, + // "operation": "create", + // "metadata": { + // "key1": "val1" + // }, + // "key": null, + // "payload": { + // - "before": { + // - "bar": "baz" + // - }, + // + "before": null, + // - 
"after": { + // - "foo": "bar" + // - } + // + "after": null + // } + // } +} + +//nolint:govet // a more descriptive example description +func ExampleExcludeProcessor_multipleFields() { + p := NewExcludeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: `Exclude multiple fields`, + Description: `It's possible to exclude multiple fields by providing a +comma-separated list of fields. In this example, we exclude ` + ".Metadata" + `, +` + ".Payload.After.foo" + ` and ` + ".Key.key1" + `.`, + Config: map[string]string{"fields": ".Metadata,.Payload.After.foo,.Key.key1"}, + Have: opencdc.Record{ + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"source": "s3"}, + Key: opencdc.StructuredData{"key1": "val1", "key2": "val2"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar", "foobar": "baz"}, Before: opencdc.StructuredData{"bar": "baz"}}, + }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationCreate, + Metadata: map[string]string{}, + Key: opencdc.StructuredData{"key2": "val2"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foobar": "baz"}, Before: opencdc.StructuredData{"bar": "baz"}}, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,20 +1,16 @@ + // { + // "position": null, + // "operation": "create", + // - "metadata": { + // - "source": "s3" + // - }, + // + "metadata": {}, + // - "key": { + // - "key1": "val1", + // + "key": { + // "key2": "val2" + // }, + // "payload": { + // "before": { + // "bar": "baz" + // }, + // - "after": { + // - "foo": "bar", + // + "after": { + // "foobar": "baz" + // } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/field/exclude_paramgen.go b/pkg/plugin/processor/builtin/impl/field/exclude_paramgen.go new file mode 100644 index 000000000..72ed28693 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/exclude_paramgen.go @@ -0,0 +1,21 @@ +// Code generated by paramgen. 
DO NOT EDIT. +// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package field + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (excludeConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "fields": { + Default: "", + Description: "Fields is a comma separated list of target fields, as they would be addressed in a Go template (e.g. `.Metadata,.Payload.After.foo`).", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/field/exclude_test.go b/pkg/plugin/processor/builtin/impl/field/exclude_test.go new file mode 100644 index 000000000..3649e77b3 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/exclude_test.go @@ -0,0 +1,96 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package field + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/matryer/is" +) + +func TestExcludeFields_Process(t *testing.T) { + is := is.New(t) + proc := NewExcludeProcessor(log.Nop()) + cfg := map[string]string{"fields": ".Metadata,.Payload.After.foo"} + ctx := context.Background() + records := []opencdc.Record{ + { + Metadata: map[string]string{"key1": "val1", "key2": "val2"}, + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": "bar", + "keep": "me", + }, + }, + }, + } + want := sdk.SingleRecord{ + Metadata: map[string]string{}, + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "keep": "me", + }, + }, + } + err := proc.Configure(ctx, cfg) + is.NoErr(err) + output := proc.Process(context.Background(), records) + is.True(len(output) == 1) + is.Equal(output[0], want) +} + +func TestExcludeField_Configure(t *testing.T) { + proc := NewExcludeProcessor(log.Nop()) + ctx := context.Background() + testCases := []struct { + name string + cfg map[string]string + wantErr bool + }{ + { + name: "valid config", + cfg: map[string]string{"fields": ".Metadata,.Payload"}, + wantErr: false, + }, { + name: "missing parameter", + cfg: map[string]string{}, + wantErr: true, + }, { + name: "cannot exclude .Operation", + cfg: map[string]string{"fields": ".Operation"}, + wantErr: true, + }, { + name: "cannot exclude .Position", + cfg: map[string]string{"fields": ".Position"}, + wantErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err := proc.Configure(ctx, tc.cfg) + if tc.wantErr { + is.True(err != nil) + return + } + is.NoErr(err) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/field/rename.go b/pkg/plugin/processor/builtin/impl/field/rename.go new file mode 100644 index 000000000..36b587842 --- /dev/null +++ 
b/pkg/plugin/processor/builtin/impl/field/rename.go @@ -0,0 +1,134 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=rename_paramgen.go renameConfig + +package field + +import ( + "context" + "strings" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + "golang.org/x/exp/slices" +) + +type renameProcessor struct { + newNames []string + referenceResolvers []sdk.ReferenceResolver + + sdk.UnimplementedProcessor +} + +func NewRenameProcessor(log.CtxLogger) sdk.Processor { + return &renameProcessor{} +} + +type renameConfig struct { + // Mapping is a comma separated list of keys and values for fields and their new names (keys and values + // are separated by colons ":"). For example: `.Metadata.key:id,.Payload.After.foo:bar`. + Mapping []string `json:"mapping" validate:"required"` +} + +func (p *renameProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "field.rename", + Summary: "Rename a group of fields.", + Description: `Rename a group of field names to new names. 
It is not +allowed to rename top-level fields (` + "`.Operation`" + `, ` + "`.Position`" + `, +` + "`.Key`" + `, ` + "`.Metadata`" + `, ` + "`.Payload.Before`" + `, ` + "`.Payload.After`" + `). + +Note that this processor only runs on structured data, if the record contains raw +JSON data, then use the processor [` + "json.decode" + `](/docs/processors/builtin/json.decode) +to parse it into structured data first.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: renameConfig{}.Parameters(), + }, nil +} + +func (p *renameProcessor) Configure(ctx context.Context, m map[string]string) error { + var forbiddenFields = []string{ + internal.MetadataReference, + internal.PayloadReference, + internal.PayloadBeforeReference, + internal.PayloadAfterReference, + internal.PositionReference, + internal.KeyReference, + internal.OperationReference, + } + + cfg := renameConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, renameConfig{}.Parameters()) + if err != nil { + return cerrors.Errorf("failed to parse configuration: %w", err) + } + p.referenceResolvers = make([]sdk.ReferenceResolver, len(cfg.Mapping)) + p.newNames = make([]string, len(cfg.Mapping)) + for i, pair := range cfg.Mapping { + parts := strings.Split(pair, ":") + if len(parts) != 2 { + return cerrors.Errorf("wrong format for the %q param, should be a comma separated list of keys and values, "+ "ex: .Metadata.key:id,.Payload.After.foo:bar", "mapping") + } + + key := strings.TrimSpace(parts[0]) + if slices.Contains(forbiddenFields, key) { + return cerrors.Errorf("cannot rename one of the top-level fields %q", key) + } + p.referenceResolvers[i], err = sdk.NewReferenceResolver(key) + if err != nil { + return cerrors.Errorf("invalid reference: %w", err) + } + + value := strings.TrimSpace(parts[1]) + if len(value) == 0 { + return cerrors.Errorf("cannot rename the key %q to an empty string", key) + } + p.newNames[i] = value + } + + return nil +} + +func (p *renameProcessor) Open(context.Context) error { + return 
nil +} + +func (p *renameProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, record := range records { + rec := record + for i, newName := range p.newNames { + ref, err := p.referenceResolvers[i].Resolve(&rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + _, err = ref.Rename(newName) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + } + out = append(out, sdk.SingleRecord(rec)) + } + return out +} + +func (p *renameProcessor) Teardown(context.Context) error { + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/field/rename_examples_test.go b/pkg/plugin/processor/builtin/impl/field/rename_examples_test.go new file mode 100644 index 000000000..06fc2147d --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/rename_examples_test.go @@ -0,0 +1,67 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package field + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // a more descriptive example description +func ExampleRenameProcessor_rename1() { + p := NewRenameProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: `Rename multiple fields`, + Description: `This example renames the fields in ` + ".Metadata" + ` and +` + ".Payload.After" + ` as specified in the ` + "mapping" + ` configuration parameter.`, + Config: map[string]string{"mapping": ".Metadata.key1:newKey,.Payload.After.foo:newFoo"}, + Have: opencdc.Record{ + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar"}, Before: opencdc.StructuredData{"bar": "baz"}}, + }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"newKey": "val1"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"newFoo": "bar"}, Before: opencdc.StructuredData{"bar": "baz"}}, + }}) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,16 +1,16 @@ + // { + // "position": null, + // "operation": "create", + // "metadata": { + // - "key1": "val1" + // + "newKey": "val1" + // }, + // "key": null, + // "payload": { + // "before": { + // "bar": "baz" + // }, + // "after": { + // - "foo": "bar" + // + "newFoo": "bar" + // } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/field/rename_paramgen.go b/pkg/plugin/processor/builtin/impl/field/rename_paramgen.go new file mode 100644 index 000000000..03917f233 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/rename_paramgen.go @@ -0,0 +1,21 @@ +// Code generated by paramgen. DO NOT EDIT. 
+// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package field + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (renameConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "mapping": { + Default: "", + Description: "Mapping is a comma separated list of keys and values for fields and their new names (keys and values\nare separated by colons \":\"). For example: `.Metadata.key:id,.Payload.After.foo:bar`.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/field/rename_test.go b/pkg/plugin/processor/builtin/impl/field/rename_test.go new file mode 100644 index 000000000..65e98ac3f --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/rename_test.go @@ -0,0 +1,93 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package field + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/matryer/is" +) + +func TestRenameField_Process(t *testing.T) { + is := is.New(t) + proc := NewRenameProcessor(log.Nop()) + ctx := context.Background() + config := map[string]string{"mapping": ".Metadata.key1:newKey,.Payload.After.foo:newFoo"} + records := []opencdc.Record{ + { + Metadata: map[string]string{"key1": "val1", "key2": "val2"}, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "foo": "bar", + }, + }, + }, + } + want := sdk.SingleRecord{ + Metadata: map[string]string{"newKey": "val1", "key2": "val2"}, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "newFoo": "bar", + }, + }, + } + err := proc.Configure(ctx, config) + is.NoErr(err) + output := proc.Process(context.Background(), records) + is.True(len(output) == 1) + is.Equal(output[0], want) +} + +func TestRenameField_Configure(t *testing.T) { + proc := NewRenameProcessor(log.Nop()) + ctx := context.Background() + testCases := []struct { + name string + cfg map[string]string + wantErr bool + }{ + { + name: "valid config", + cfg: map[string]string{"mapping": ".Payload.After.foo:bar"}, + wantErr: false, + }, { + name: "invalid config, contains a top-level reference", + cfg: map[string]string{"mapping": ".Metadata:foo,.Payload.After.foo:bar"}, + wantErr: true, + }, { + name: "mapping param is missing", + cfg: map[string]string{}, + wantErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err := proc.Configure(ctx, tc.cfg) + if tc.wantErr { + is.True(err != nil) + return + } + is.NoErr(err) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/field/set.go b/pkg/plugin/processor/builtin/impl/field/set.go new file mode 100644 index 000000000..e3c50adc4 --- /dev/null +++ 
b/pkg/plugin/processor/builtin/impl/field/set.go @@ -0,0 +1,115 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=set_paramgen.go setConfig + +package field + +import ( + "bytes" + "context" + "text/template" + + "github.com/Masterminds/sprig/v3" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" +) + +type setProcessor struct { + referenceResolver sdk.ReferenceResolver + tmpl *template.Template + + sdk.UnimplementedProcessor +} + +func NewSetProcessor(log.CtxLogger) sdk.Processor { + return &setProcessor{} +} + +type setConfig struct { + // Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`). + // Note that it is not allowed to set the .Position field. + Field string `json:"field" validate:"required,exclusion=.Position"` + // Value is a Go template expression which will be evaluated and stored in `field` (e.g. `{{ .Payload.After }}`). + Value string `json:"value" validate:"required"` +} + +func (p *setProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "field.set", + Summary: "Set the value of a certain field.", + Description: `Set the value of a certain field to any value. It is not allowed to set the .Position field. 
+The new value can be a Go template expression, the processor will evaluate the output and assign the value to the target field. +If the "field" provided doesn't exist, the processor will create that field and assign its value. +This processor can be used for multiple purposes, like extracting fields, hoisting data, inserting fields, copying fields, masking fields, etc. +Note that this processor only runs on structured data, if the record contains raw JSON data, then use the processor +[` + "json.decode" + `](/docs/processors/builtin/json.decode) to parse it into structured data first.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: setConfig{}.Parameters(), + }, nil +} + +func (p *setProcessor) Configure(ctx context.Context, m map[string]string) error { + cfg := setConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, setConfig{}.Parameters()) + if err != nil { + return cerrors.Errorf("failed to parse configuration: %w", err) + } + + tmpl, err := template.New("").Funcs(sprig.FuncMap()).Parse(cfg.Value) + if err != nil { + return cerrors.Errorf("failed to parse the %q param template: %w", "value", err) + } + p.tmpl = tmpl + resolver, err := sdk.NewReferenceResolver(cfg.Field) + if err != nil { + return cerrors.Errorf("failed to parse the %q param: %w", "field", err) + } + p.referenceResolver = resolver + return nil +} + +func (p *setProcessor) Open(context.Context) error { + return nil +} + +func (p *setProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, record := range records { + rec := record + var b bytes.Buffer + // evaluate the new value + err := p.tmpl.Execute(&b, rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + ref, err := p.referenceResolver.Resolve(&rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + err = ref.Set(b.String()) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) 
+ } + out = append(out, sdk.SingleRecord(rec)) + } + return out +} + +func (p *setProcessor) Teardown(context.Context) error { + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/field/set_examples_test.go b/pkg/plugin/processor/builtin/impl/field/set_examples_test.go new file mode 100644 index 000000000..d680fd833 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/set_examples_test.go @@ -0,0 +1,135 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package field + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // a more descriptive example description +func ExampleSetProcessor_setOperation() { + p := NewSetProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Sets the record operation to `update`", + Description: "This example sets the `.Operation` field to `update` for all records.", + Config: map[string]string{"field": ".Operation", "value": "update"}, + Have: opencdc.Record{Operation: opencdc.OperationCreate}, + Want: sdk.SingleRecord{Operation: opencdc.OperationUpdate}, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,10 +1,10 @@ + // { + // "position": null, + // - "operation": "create", + // + "operation": "update", + // "metadata": null, + // "key": null, + // "payload": { + // "before": null, + // "after": null + // } + // } +} + +//nolint:govet // a more descriptive example description +func ExampleSetProcessor_addField() { + p := NewSetProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: `Add field`, + Description: `This example adds a new field to the record. 
The field is +added to ` + ".Payload.After" + ` and is set to ` + "bar" + `.`, + Config: map[string]string{"field": ".Payload.After.foo", "value": "bar"}, + Have: opencdc.Record{Operation: opencdc.OperationSnapshot, + Key: opencdc.StructuredData{"my-key": "id"}, + }, + Want: sdk.SingleRecord{ + Key: opencdc.StructuredData{"my-key": "id"}, + Operation: opencdc.OperationSnapshot, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar"}}, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,12 +1,14 @@ + // { + // "position": null, + // "operation": "snapshot", + // "metadata": null, + // "key": { + // "my-key": "id" + // }, + // "payload": { + // "before": null, + // - "after": null + // + "after": { + // + "foo": "bar" + // + } + // } + // } +} + +//nolint:govet // a more descriptive example description +func ExampleSetProcessor_template() { + p := NewSetProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: `Set field using Go template`, + Description: "This example sets the `.Payload.After.postgres` field to `true` if the `.Metadata.table` field contains `postgres`.", + Config: map[string]string{"field": ".Payload.After.postgres", "value": "{{ eq .Metadata.table \"postgres\" }}"}, + Have: opencdc.Record{ + Metadata: map[string]string{"table": "postgres"}, + Operation: opencdc.OperationSnapshot, + Payload: opencdc.Change{After: opencdc.StructuredData{"postgres": "false"}}, + }, + Want: sdk.SingleRecord{ + Metadata: map[string]string{"table": "postgres"}, + Operation: opencdc.OperationSnapshot, + Payload: opencdc.Change{After: opencdc.StructuredData{"postgres": "true"}}, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,14 +1,14 @@ + // { + // "position": null, + // "operation": "snapshot", + // "metadata": { + // "table": "postgres" + // }, + // "key": null, + // "payload": { + // "before": null, + // "after": { + // - 
"postgres": "false" + // + "postgres": "true" + // } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/field/set_paramgen.go b/pkg/plugin/processor/builtin/impl/field/set_paramgen.go new file mode 100644 index 000000000..88bc19510 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/set_paramgen.go @@ -0,0 +1,30 @@ +// Code generated by paramgen. DO NOT EDIT. +// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package field + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (setConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: "", + Description: "Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`).\nNote that it is not allowed to set the .Position field.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + config.ValidationExclusion{List: []string{".Position"}}, + }, + }, + "value": { + Default: "", + Description: "Value is a Go template expression which will be evaluated and stored in `field` (e.g. `{{ .Payload.After }}`).", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/field/set_test.go b/pkg/plugin/processor/builtin/impl/field/set_test.go new file mode 100644 index 000000000..22f0db0b5 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/field/set_test.go @@ -0,0 +1,147 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package field + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/matryer/is" +) + +func TestSetField_Process(t *testing.T) { + proc := NewSetProcessor(log.Nop()) + var err error + ctx := context.Background() + testCases := []struct { + name string + config map[string]string + record opencdc.Record + want sdk.SingleRecord + }{ + { + name: "setting a metadata field", + config: map[string]string{"field": ".Metadata.table", "value": "postgres"}, + record: opencdc.Record{ + Metadata: map[string]string{"table": "my-table"}, + }, + want: sdk.SingleRecord{ + Metadata: map[string]string{"table": "postgres"}, + }, + }, + { + name: "setting a non existent field", + config: map[string]string{"field": ".Metadata.nonExistent", "value": "postgres"}, + record: opencdc.Record{ + Metadata: map[string]string{"table": "my-table"}, + }, + want: sdk.SingleRecord{ + Metadata: map[string]string{"table": "my-table", "nonExistent": "postgres"}, + }, + }, + { + name: "setting the operation field", + config: map[string]string{"field": ".Operation", "value": "delete"}, + record: opencdc.Record{ + Operation: opencdc.OperationCreate, + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationDelete, + }, + }, { + name: "setting the payload.after with a go template evaluated value", + config: map[string]string{"field": ".Payload.After.foo", "value": "{{ .Payload.After.baz }}"}, + record: opencdc.Record{ + Payload: 
opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "foo": "bar", + "baz": "bar2", + }, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "foo": "bar2", + "baz": "bar2", + }, + }, + }, + }} + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err = proc.Configure(ctx, tc.config) + is.NoErr(err) + output := proc.Process(ctx, []opencdc.Record{tc.record}) + is.True(len(output) == 1) + is.Equal(output[0], tc.want) + }) + } +} + +func TestSetField_Configure(t *testing.T) { + proc := NewSetProcessor(log.Nop()) + ctx := context.Background() + testCases := []struct { + name string + cfg map[string]string + wantErr bool + }{ + { + name: "valid config", + cfg: map[string]string{"field": ".Metadata", "value": "{{ .Payload.After.foo }}"}, + wantErr: false, + }, + { + name: "invalid value template format", + cfg: map[string]string{"field": ".Metadata", "value": "{{ invalid }}"}, + wantErr: true, + }, { + name: "value param is missing", + cfg: map[string]string{"field": ".Metadata"}, + wantErr: true, + }, { + name: "field param is missing", + cfg: map[string]string{"value": "sth"}, + wantErr: true, + }, { + name: "cannot set .Position", + cfg: map[string]string{"field": ".Position", "value": "newPos"}, + wantErr: true, + }, { + name: "all params are missing", + cfg: map[string]string{}, + wantErr: true, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err := proc.Configure(ctx, tc.cfg) + if tc.wantErr { + is.True(err != nil) + return + } + is.NoErr(err) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/filter.go b/pkg/plugin/processor/builtin/impl/filter.go new file mode 100644 index 000000000..6c033a44a --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/filter.go @@ -0,0 +1,65 @@ +// Copyright © 2024 Meroxa, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package impl + +import ( + "context" + + "github.com/conduitio/conduit-commons/config" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" +) + +type filterProcessor struct { + sdk.UnimplementedProcessor +} + +func NewFilterProcessor(log.CtxLogger) sdk.Processor { + return &filterProcessor{} +} + +func (p *filterProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "filter", + Summary: "Acknowledges all records that get passed to the filter.", + Description: `Acknowledges all records that get passed to the filter, so the records will be filtered out if +the condition provided to the processor is evaluated to "true". 
+Make sure to add a condition to this processor, otherwise all records will be filtered out.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: map[string]config.Parameter{}, + }, nil +} + +func (p *filterProcessor) Configure(_ context.Context, _ map[string]string) error { + return nil +} + +func (p *filterProcessor) Open(context.Context) error { + return nil +} + +func (p *filterProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, len(records)) + for i := range records { + out[i] = sdk.FilterRecord{} + } + return out +} + +func (p *filterProcessor) Teardown(context.Context) error { + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/filter_examples_test.go b/pkg/plugin/processor/builtin/impl/filter_examples_test.go new file mode 100644 index 000000000..01a561e5e --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/filter_examples_test.go @@ -0,0 +1,40 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package impl + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // we're using a more descriptive name of example +func ExampleFilterProcessor() { + p := NewFilterProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: `Filter out the record`, + Config: map[string]string{}, + Have: opencdc.Record{ + Operation: opencdc.OperationCreate, + Metadata: map[string]string{"key1": "val1"}, + Payload: opencdc.Change{After: opencdc.StructuredData{"foo": "bar"}, Before: opencdc.StructuredData{"bar": "baz"}}, + }, + Want: sdk.FilterRecord{}}) + + // Output: + // processor filtered record out +} diff --git a/pkg/plugin/processor/builtin/impl/filter_test.go b/pkg/plugin/processor/builtin/impl/filter_test.go new file mode 100644 index 000000000..b1f7029cd --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/filter_test.go @@ -0,0 +1,47 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package impl + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/matryer/is" +) + +func TestFilter_Process(t *testing.T) { + is := is.New(t) + proc := NewFilterProcessor(log.Nop()) + records := []opencdc.Record{ + { + Metadata: map[string]string{"key1": "val1"}, + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": "bar", + }, + }, + }, + { + Metadata: map[string]string{"key2": "val2"}, + Payload: opencdc.Change{}, + }, + } + want := []sdk.ProcessedRecord{sdk.FilterRecord{}, sdk.FilterRecord{}} + output := proc.Process(context.Background(), records) + is.Equal(output, want) +} diff --git a/pkg/plugin/processor/builtin/impl/json/decode.go b/pkg/plugin/processor/builtin/impl/json/decode.go new file mode 100644 index 000000000..cfa453e7c --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/decode.go @@ -0,0 +1,134 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:generate paramgen -output=decode_paramgen.go decodeConfig + +package json + +import ( + "context" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/goccy/go-json" +) + +type decodeProcessor struct { + sdk.UnimplementedProcessor + + referenceResolver sdk.ReferenceResolver +} + +func NewDecodeProcessor(log.CtxLogger) sdk.Processor { + return &decodeProcessor{} +} + +type decodeConfig struct { + // Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`). + // you can only decode fields that are under .Key and .Payload. + Field string `json:"field" validate:"required,regex=^\\.(Payload|Key).*,exclusion=.Payload"` +} + +func (p *decodeProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "json.decode", + Summary: "Decodes a specific field from JSON raw data (string) to structured data.", + Description: `The processor takes JSON raw data (` + "`string`" + ` or ` + "`[]byte`" + `) +from the target field, parses it as JSON structured data and stores the decoded +structured data in the target field. 
+ +This processor is only applicable to fields under ` + "`.Key`" + `, ` + "`.Payload.Before`" + ` and +` + "`.Payload.After`" + `, as they can contain structured data.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: decodeConfig{}.Parameters(), + }, nil +} + +func (p *decodeProcessor) Configure(ctx context.Context, m map[string]string) error { + cfg := decodeConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, decodeConfig{}.Parameters()) + if err != nil { + return cerrors.Errorf("failed to parse configuration: %w", err) + } + resolver, err := sdk.NewReferenceResolver(cfg.Field) + if err != nil { + return cerrors.Errorf(`failed to parse the "field" parameter: %w`, err) + } + p.referenceResolver = resolver + return nil +} + +func (p *decodeProcessor) Open(context.Context) error { + return nil +} + +func (p *decodeProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, record := range records { + rec := record + ref, err := p.referenceResolver.Resolve(&rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + + data := ref.Get() + switch d := data.(type) { + case opencdc.RawData: + bytes := d.Bytes() + err := p.setJSONData(bytes, ref) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + case string: + bytes := []byte(d) + err := p.setJSONData(bytes, ref) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + case []byte: + err := p.setJSONData(d, ref) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + case opencdc.StructuredData, map[string]any: + // data is already structured + case nil: + // if the field is nil leave it as it is + default: + return append(out, sdk.ErrorRecord{Error: cerrors.Errorf("unexpected data type %T", data)}) + } + out = append(out, sdk.SingleRecord(rec)) + } + return out +} + +func (p *decodeProcessor) Teardown(context.Context) error { + return nil +} + 
+func (p *decodeProcessor) setJSONData(bytes []byte, ref sdk.Reference) error { + if len(bytes) == 0 { + // value is an empty json + return ref.Set(nil) + } + var jsonData any + err := json.Unmarshal(bytes, &jsonData) + if err != nil { + return cerrors.Errorf("failed to unmarshal raw data as JSON: %w", err) + } + return ref.Set(jsonData) +} diff --git a/pkg/plugin/processor/builtin/impl/json/decode_examples_test.go b/pkg/plugin/processor/builtin/impl/json/decode_examples_test.go new file mode 100644 index 000000000..9f4f6fd16 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/decode_examples_test.go @@ -0,0 +1,121 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package json + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // a more descriptive example description +func ExampleDecodeProcessor_rawKey() { + p := NewDecodeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: `Decode record key as JSON`, + Description: `This example takes a record containing a raw JSON string in +` + ".Key" + ` and converts it into structured data.`, + Config: map[string]string{"field": ".Key"}, + Have: opencdc.Record{ + Operation: opencdc.OperationCreate, + Key: opencdc.RawData(`{"after":{"data":4,"id":3}}`), + }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationCreate, + Key: opencdc.StructuredData{ + "after": map[string]interface{}{"data": float64(4), "id": float64(3)}, + }, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,10 +1,15 @@ + // { + // "position": null, + // "operation": "create", + // "metadata": null, + // - "key": "{\"after\":{\"data\":4,\"id\":3}}", + // + "key": { + // + "after": { + // + "data": 4, + // + "id": 3 + // + } + // + }, + // "payload": { + // "before": null, + // "after": null + // } + // } +} + +//nolint:govet // a more descriptive example description +func ExampleDecodeProcessor_rawPayloadField() { + p := NewDecodeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Decode nested field as JSON", + Description: `This example takes a record containing a raw JSON string in +` + ".Payload.Before.foo" + ` and converts it into a map.`, + Config: map[string]string{"field": ".Payload.Before.foo"}, + Have: opencdc.Record{ + Operation: opencdc.OperationSnapshot, + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "foo": `{"before":{"data":4,"id":3},"baz":"bar"}`, + }, + }, 
+ }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationSnapshot, + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "foo": map[string]any{ + "before": map[string]any{"data": float64(4), "id": float64(3)}, + "baz": "bar", + }, + }, + }, + }}) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,12 +1,18 @@ + // { + // "position": null, + // "operation": "snapshot", + // "metadata": null, + // "key": null, + // "payload": { + // "before": { + // - "foo": "{\"before\":{\"data\":4,\"id\":3},\"baz\":\"bar\"}" + // + "foo": { + // + "baz": "bar", + // + "before": { + // + "data": 4, + // + "id": 3 + // + } + // + } + // }, + // "after": null + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/json/decode_paramgen.go b/pkg/plugin/processor/builtin/impl/json/decode_paramgen.go new file mode 100644 index 000000000..0f2980349 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/decode_paramgen.go @@ -0,0 +1,25 @@ +// Code generated by paramgen. DO NOT EDIT. +// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package json + +import ( + "regexp" + + "github.com/conduitio/conduit-commons/config" +) + +func (decodeConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: "", + Description: "Field is the target field, as it would be addressed in a Go template (e.g. 
`.Payload.After.foo`).\nyou can only decode fields that are under .Key and .Payload.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + config.ValidationRegex{Regex: regexp.MustCompile("^\\.(Payload|Key).*")}, + config.ValidationExclusion{List: []string{".Payload"}}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/json/decode_test.go b/pkg/plugin/processor/builtin/impl/json/decode_test.go new file mode 100644 index 000000000..aa8464654 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/decode_test.go @@ -0,0 +1,231 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package json + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + "github.com/google/go-cmp/cmp" + "github.com/matryer/is" +) + +func TestDecodeJSON_Process(t *testing.T) { + proc := NewDecodeProcessor(log.Nop()) + ctx := context.Background() + testCases := []struct { + name string + config map[string]string + record opencdc.Record + want sdk.ProcessedRecord + }{ + { + name: "raw key to structured", + config: map[string]string{"field": ".Key"}, + record: opencdc.Record{ + Key: opencdc.RawData(`{"after":{"data":4,"id":3}}`), + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{ + "after": map[string]any{"data": float64(4), "id": float64(3)}, + }, + }, + }, { + name: "raw payload.after.foo to structured", + config: map[string]string{"field": ".Payload.After.foo"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": `{"after":{"data":4,"id":3},"baz":"bar"}`, + }, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": map[string]any{ + "after": map[string]any{"data": float64(4), "id": float64(3)}, + "baz": "bar", + }, + }, + }, + }, + }, { + name: "slice payload.after.foo to structured", + config: map[string]string{"field": ".Payload.After.foo"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": `["one", "two", "three"]`, + }, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": []any{"one", "two", "three"}, + }, + }, + }, + }, { + name: "string JSON value payload.after.foo to structured", + config: map[string]string{"field": ".Payload.After.foo"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + After: 
opencdc.StructuredData{ + "foo": `"bar"`, + }, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": "bar", + }, + }, + }, + }, { + name: "raw payload.before to structured", + config: map[string]string{"field": ".Payload.Before"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + Before: opencdc.RawData(`{"before":{"data":4},"foo":"bar"}`), + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "before": map[string]any{"data": float64(4)}, + "foo": "bar", + }, + }, + }, + }, { + name: "already structured data", + config: map[string]string{"field": ".Key"}, + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "after": map[string]any{"data": float64(4), "id": float64(3)}, + }, + }, + want: sdk.SingleRecord{ + Key: opencdc.StructuredData{ + "after": map[string]any{"data": float64(4), "id": float64(3)}, + }, + }, + }, { + name: "empty raw data converted to empty structured data", + config: map[string]string{"field": ".Key"}, + record: opencdc.Record{ + Key: opencdc.RawData(""), + }, + want: sdk.SingleRecord{ + Key: nil, + }, + }, { + name: "nil value", + config: map[string]string{"field": ".Payload.After"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + After: nil, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + After: nil, + }, + }, + }, { + name: "invalid json", + config: map[string]string{"field": ".Key"}, + record: opencdc.Record{ + Key: opencdc.RawData(`"invalid":"json"`), + }, + want: sdk.ErrorRecord{Error: cerrors.New("failed to unmarshal raw data as JSON: invalid character ':' after top-level value")}, + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err := proc.Configure(ctx, tc.config) + is.NoErr(err) + got := proc.Process(ctx, []opencdc.Record{tc.record}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(tc.want, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} + +func 
TestDecodeJSON_Configure(t *testing.T) { + proc := NewDecodeProcessor(log.Nop()) + ctx := context.Background() + testCases := []struct { + name string + config map[string]string + wantErr bool + }{ + { + name: "valid field, key", + config: map[string]string{"field": ".Key"}, + wantErr: false, + }, + { + name: "valid field, payload.after", + config: map[string]string{"field": ".Payload.After"}, + wantErr: false, + }, + { + name: "valid field, payload.before", + config: map[string]string{"field": ".Payload.Before"}, + wantErr: false, + }, + { + name: "valid field, payload.after.foo", + config: map[string]string{"field": ".Payload.After.foo"}, + wantErr: false, + }, + { + name: "invalid config, missing param", + config: map[string]string{}, + wantErr: true, + }, + { + name: "invalid field .Metadata", + config: map[string]string{"field": ".Metadata"}, + wantErr: true, + }, + { + name: "invalid field, .Payload", + config: map[string]string{"field": ".Payload"}, + wantErr: true, + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err := proc.Configure(ctx, tc.config) + if tc.wantErr { + is.True(err != nil) + return + } + is.NoErr(err) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/json/encode.go b/pkg/plugin/processor/builtin/impl/json/encode.go new file mode 100644 index 000000000..9604b8b72 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/encode.go @@ -0,0 +1,114 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=encode_paramgen.go encodeConfig + +package json + +import ( + "context" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/goccy/go-json" +) + +type encodeProcessor struct { + referenceResolver sdk.ReferenceResolver + + sdk.UnimplementedProcessor +} + +func NewEncodeProcessor(log.CtxLogger) sdk.Processor { + return &encodeProcessor{} +} + +type encodeConfig struct { + // Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`). + // you can only encode fields that are under .Key, .Payload.Before and .Payload.After. + Field string `json:"field" validate:"required,regex=^\\.(Payload|Key).*,exclusion=.Payload"` +} + +func (p *encodeProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "json.encode", + Summary: "Encodes a specific field from structured data to JSON raw data (string).", + Description: `The processor takes data from the target field, encodes it into a JSON value +and stores the encoded value in the target field. 
+ +This processor is only applicable to fields under ` + "`.Key`" + `, ` + "`.Payload.Before`" + ` and +` + "`.Payload.After`" + `, as they can contain structured data.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: encodeConfig{}.Parameters(), + }, nil +} + +func (p *encodeProcessor) Configure(ctx context.Context, m map[string]string) error { + cfg := encodeConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, encodeConfig{}.Parameters()) + if err != nil { + return cerrors.Errorf("failed to parse configuration: %w", err) + } + resolver, err := sdk.NewReferenceResolver(cfg.Field) + if err != nil { + return cerrors.Errorf(`failed to parse the "field" parameter: %w`, err) + } + p.referenceResolver = resolver + return nil +} + +func (p *encodeProcessor) Open(context.Context) error { + return nil +} + +func (p *encodeProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, record := range records { + rec, err := p.encode(record) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + out = append(out, rec) + } + return out +} + +func (p *encodeProcessor) Teardown(context.Context) error { + return nil +} + +func (p *encodeProcessor) encode(rec opencdc.Record) (sdk.ProcessedRecord, error) { + ref, err := p.referenceResolver.Resolve(&rec) + if err != nil { + return nil, cerrors.Errorf("failed to resolve the field: %w", err) + } + valIn := ref.Get() + if valIn == nil { + // do not encode nil values + return sdk.SingleRecord(rec), nil + } + + valOut, err := json.Marshal(valIn) + if err != nil { + return nil, err + } + err = ref.Set(string(valOut)) + if err != nil { + return nil, cerrors.Errorf("failed to set the JSON encoded value into the record: %w", err) + } + return sdk.SingleRecord(rec), nil +} diff --git a/pkg/plugin/processor/builtin/impl/json/encode_examples_test.go b/pkg/plugin/processor/builtin/impl/json/encode_examples_test.go new file mode 
100644 index 000000000..0946e50e4 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/encode_examples_test.go @@ -0,0 +1,121 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package json + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // a more descriptive example description +func ExampleEncodeProcessor_structuredKey() { + p := NewEncodeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Encode record key to JSON", + Description: `This example takes a record containing structured data in +` + ".Key" + ` and converts it into a raw JSON string.`, + Config: map[string]string{"field": ".Key"}, + Have: opencdc.Record{ + Operation: opencdc.OperationCreate, + Key: opencdc.StructuredData{ + "tables": []string{"table1,table2"}, + }, + }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationCreate, + Key: opencdc.RawData(`{"tables":["table1,table2"]}`), + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,14 +1,10 @@ + // { + // "position": null, + // "operation": "create", + // "metadata": null, + // - "key": { + // - "tables": [ + // - "table1,table2" + // - ] + // - }, + // + "key": "{\"tables\":[\"table1,table2\"]}", + 
// "payload": { + // "before": null, + // "after": null + // } + // } +} + +//nolint:govet // a more descriptive example description +func ExampleEncodeProcessor_mapToJSON() { + p := NewEncodeProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Encode nested field to JSON", + Description: `This example takes a record containing a map in +` + ".Payload.Before.foo" + ` and converts it into a raw JSON string.`, + Config: map[string]string{"field": ".Payload.Before.foo"}, + Have: opencdc.Record{ + Operation: opencdc.OperationSnapshot, + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "foo": map[string]any{ + "before": map[string]any{"data": float64(4), "id": float64(3)}, + "baz": "bar", + }, + }, + }, + }, + Want: sdk.SingleRecord{ + Operation: opencdc.OperationSnapshot, + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "foo": `{"baz":"bar","before":{"data":4,"id":3}}`, + }, + }, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,18 +1,12 @@ + // { + // "position": null, + // "operation": "snapshot", + // "metadata": null, + // "key": null, + // "payload": { + // "before": { + // - "foo": { + // - "baz": "bar", + // - "before": { + // - "data": 4, + // - "id": 3 + // - } + // - } + // + "foo": "{\"baz\":\"bar\",\"before\":{\"data\":4,\"id\":3}}" + // }, + // "after": null + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/json/encode_paramgen.go b/pkg/plugin/processor/builtin/impl/json/encode_paramgen.go new file mode 100644 index 000000000..252d91437 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/encode_paramgen.go @@ -0,0 +1,25 @@ +// Code generated by paramgen. DO NOT EDIT. 
+// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package json + +import ( + "regexp" + + "github.com/conduitio/conduit-commons/config" +) + +func (encodeConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: "", + Description: "Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`).\nyou can only encode fields that are under .Key, .Payload.Before and .Payload.After.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + config.ValidationRegex{Regex: regexp.MustCompile("^\\.(Payload|Key).*")}, + config.ValidationExclusion{List: []string{".Payload"}}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/json/encode_test.go b/pkg/plugin/processor/builtin/impl/json/encode_test.go new file mode 100644 index 000000000..884cfb7cb --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/encode_test.go @@ -0,0 +1,179 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package json + +import ( + "context" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + "github.com/google/go-cmp/cmp" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/matryer/is" +) + +func TestEncode_Process(t *testing.T) { + proc := NewEncodeProcessor(log.Nop()) + ctx := context.Background() + testCases := []struct { + name string + config map[string]string + record opencdc.Record + want sdk.ProcessedRecord + }{ + { + name: "structured key to raw data", + config: map[string]string{"field": ".Key"}, + record: opencdc.Record{ + Key: opencdc.StructuredData{ + "after": map[string]any{"data": float64(4), "id": float64(3)}, + }, + }, + want: sdk.SingleRecord{ + Key: opencdc.RawData(`{"after":{"data":4,"id":3}}`), + }, + }, { + name: "encode payload.after.foo map", + config: map[string]string{"field": ".Payload.After.foo"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": map[string]any{ + "after": map[string]any{"data": float64(4), "id": float64(3)}, + "baz": "bar", + }, + }, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": `{"after":{"data":4,"id":3},"baz":"bar"}`, + }, + }, + }, + }, { + name: "slice under payload.after to raw", + config: map[string]string{"field": ".Payload.After"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": []any{"one", "two", "three"}, + }, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.RawData(`{"foo":["one","two","three"]}`), + }, + }, + }, { + name: "encode int value", + config: map[string]string{"field": ".Payload.After.foo"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "foo": 123, + }, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + After: 
opencdc.StructuredData{ + "foo": "123", + }, + }, + }, + }, { + name: "nil value", + config: map[string]string{"field": ".Payload.After"}, + record: opencdc.Record{ + Payload: opencdc.Change{ + After: nil, + }, + }, + want: sdk.SingleRecord{ + Payload: opencdc.Change{ + After: nil, + }, + }, + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err := proc.Configure(ctx, tc.config) + is.NoErr(err) + got := proc.Process(ctx, []opencdc.Record{tc.record}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(tc.want, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} + +func TestEncode_Configure(t *testing.T) { + proc := NewEncodeProcessor(log.Nop()) + ctx := context.Background() + testCases := []struct { + name string + config map[string]string + wantErr bool + }{ + { + name: "valid field, key", + config: map[string]string{"field": ".Key"}, + wantErr: false, + }, + { + name: "valid field, payload.after", + config: map[string]string{"field": ".Payload.After"}, + wantErr: false, + }, + { + name: "valid field, payload.after.foo", + config: map[string]string{"field": ".Payload.After.foo"}, + wantErr: false, + }, + { + name: "invalid config, missing param", + config: map[string]string{}, + wantErr: true, + }, + { + name: "invalid field .Metadata", + config: map[string]string{"field": ".Metadata"}, + wantErr: true, + }, + { + name: "invalid field, .Payload", + config: map[string]string{"field": ".Payload"}, + wantErr: true, + }, + } + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + err := proc.Configure(ctx, tc.config) + if tc.wantErr { + is.True(err != nil) + return + } + is.NoErr(err) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/json/examples_exporter_test.go b/pkg/plugin/processor/builtin/impl/json/examples_exporter_test.go new file mode 100644 index 000000000..5675c505c --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/examples_exporter_test.go @@ -0,0 +1,34 @@ 
+// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build export_processors + +package json + +import ( + "os" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +func TestMain(m *testing.M) { + code := m.Run() + if code > 0 { + os.Exit(code) + } + + // tests passed, export the processors + exampleutil.ExportProcessors() +} diff --git a/pkg/plugin/processor/builtin/impl/json/examples_test.go b/pkg/plugin/processor/builtin/impl/json/examples_test.go new file mode 100644 index 000000000..920708ad0 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/json/examples_test.go @@ -0,0 +1,17 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go test -count=1 -tags export_processors . 
+ +package json diff --git a/pkg/plugin/processor/builtin/impl/unwrap/debezium.go b/pkg/plugin/processor/builtin/impl/unwrap/debezium.go new file mode 100644 index 000000000..cd08a1b6d --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/debezium.go @@ -0,0 +1,334 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=debezium_paramgen.go debeziumConfig + +package unwrap + +import ( + "context" + "fmt" + "time" + + "github.com/conduitio/conduit-commons/config" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/foundation/multierror" + "github.com/goccy/go-json" +) + +const ( + debeziumOpCreate = "c" + debeziumOpUpdate = "u" + debeziumOpDelete = "d" + debeziumOpRead = "r" // snapshot + debeziumOpUnset = "$unset" // mongoDB unset operation + + debeziumFieldBefore = "before" + debeziumFieldAfter = "after" + debeziumFieldSource = "source" + debeziumFieldOp = "op" + debeziumFieldTimestamp = "ts_ms" +) + +type debeziumConfig struct { + // TODO change link to docs + + // Field is a reference to the field which contains the Debezium record. 
+ // + // For more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66. + Field string `json:"field" validate:"regex=^.Payload" default:".Payload.After"` +} + +type debeziumProcessor struct { + sdk.UnimplementedProcessor + + logger log.CtxLogger + fieldRefRes sdk.ReferenceResolver +} + +func NewDebeziumProcessor(logger log.CtxLogger) sdk.Processor { + return &debeziumProcessor{logger: logger} +} + +func (d *debeziumProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "unwrap.debezium", + Summary: "Unwraps a Debezium record from the input OpenCDC record.", + Description: `In this processor, the wrapped (Debezium) record replaces the wrapping record (being processed) +completely, except for the position. + +The Debezium record's metadata and the wrapping record's metadata is merged, with the Debezium metadata having precedence. + +This is useful in cases where Conduit acts as an intermediary between a Debezium source and a Debezium destination. 
+In such cases, the Debezium record is set as the OpenCDC record's payload, and needs to be unwrapped for further usage.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: debeziumConfig{}.Parameters(), + }, nil +} + +func (d *debeziumProcessor) Configure(_ context.Context, m map[string]string) error { + cfg := debeziumConfig{} + inputCfg := config.Config(m).Sanitize().ApplyDefaults(cfg.Parameters()) + err := inputCfg.Validate(cfg.Parameters()) + if err != nil { + return cerrors.Errorf("invalid configuration: %w", err) + } + + err = inputCfg.DecodeInto(&cfg) + if err != nil { + return cerrors.Errorf("failed decoding configuration: %w", err) + } + + rr, err := sdk.NewReferenceResolver(cfg.Field) + if err != nil { + return cerrors.Errorf("invalid reference: %w", err) + } + + d.fieldRefRes = rr + return nil +} + +func (d *debeziumProcessor) Open(context.Context) error { + return nil +} + +func (d *debeziumProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + proc, err := d.processRecord(rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + out = append(out, proc) + } + + return out +} + +func (d *debeziumProcessor) Teardown(context.Context) error { + return nil +} + +func (d *debeziumProcessor) processRecord(rec opencdc.Record) (sdk.ProcessedRecord, error) { + // record must be structured + ref, err := d.fieldRefRes.Resolve(&rec) + if err != nil { + return nil, cerrors.Errorf("failed resolving reference: %w", err) + } + + var debeziumEvent opencdc.StructuredData + switch d := ref.Get().(type) { + case opencdc.StructuredData: + debeziumEvent = d + case map[string]any: + debeziumEvent = d + case opencdc.RawData: + err := json.Unmarshal(d.Bytes(), &debeziumEvent) + if err != nil { + return nil, cerrors.Errorf("failed unmarshalling JSON from raw data: %w", err) + } + case string: + err := json.Unmarshal([]byte(d), 
&debeziumEvent) + if err != nil { + return nil, cerrors.Errorf("failed unmarshalling JSON from string: %w", err) + } + default: + return nil, cerrors.Errorf("unexpected data type %T", ref.Get()) + } + + // get payload + debeziumRec, ok := debeziumEvent["payload"].(map[string]any) // the payload has the debezium record + if !ok { + return nil, cerrors.New("data to be unwrapped doesn't contain a payload field") + } + + // check fields under payload + err = d.validateRecord(debeziumRec) + if err != nil { + return nil, cerrors.Errorf("invalid record: %w", err) + } + + before, err := d.valueToData(debeziumRec[debeziumFieldBefore]) + if err != nil { + return nil, cerrors.Errorf("failed to parse field %s: %w", debeziumFieldBefore, err) + } + + after, err := d.valueToData(debeziumRec[debeziumFieldAfter]) + if err != nil { + return nil, cerrors.Errorf("failed to parse field %s: %w", debeziumFieldAfter, err) + } + + op, ok := debeziumRec[debeziumFieldOp].(string) + if !ok { + return nil, cerrors.Errorf("%s operation is not a string", op) + } + + operation, err := d.convertOperation(op) + if err != nil { + return nil, cerrors.Errorf("error unwrapping operation: %w", err) + } + + metadata, err := d.unwrapMetadata(rec, debeziumRec) + if err != nil { + return nil, cerrors.Errorf("error unwrapping metadata: %w", err) + } + + return sdk.SingleRecord{ + Key: d.unwrapKey(rec.Key), + Position: rec.Position, + Operation: operation, + Payload: opencdc.Change{ + Before: before, + After: after, + }, + Metadata: metadata, + }, nil +} + +func (d *debeziumProcessor) valueToData(val any) (opencdc.Data, error) { + switch v := val.(type) { + case map[string]any: + return opencdc.StructuredData(v), nil + case string: + return opencdc.RawData(v), nil + case nil: + // nil is allowed + return nil, nil + default: + return nil, cerrors.Errorf("expected a map or a string, got %T", val) + } +} + +func (d *debeziumProcessor) validateRecord(data opencdc.StructuredData) error { + var multiErr error + if 
_, ok := data[debeziumFieldAfter]; !ok { + multiErr = multierror.Append(multiErr, cerrors.Errorf("the %q field is missing from debezium payload", debeziumFieldAfter)) + } + if _, ok := data[debeziumFieldSource]; !ok { + multiErr = multierror.Append(multiErr, cerrors.Errorf("the %q field is missing from debezium payload", debeziumFieldSource)) + } + if _, ok := data[debeziumFieldOp]; !ok { + multiErr = multierror.Append(multiErr, cerrors.Errorf("the %q field is missing from debezium payload", debeziumFieldOp)) + } + // ts_ms and transaction can be empty + return multiErr +} + +func (d *debeziumProcessor) unwrapMetadata(rec opencdc.Record, dbzRec opencdc.StructuredData) (opencdc.Metadata, error) { + var source map[string]string + for field, val := range dbzRec { + switch field { + case debeziumFieldAfter, debeziumFieldBefore, debeziumFieldOp: + continue // ignore + case debeziumFieldTimestamp: + tsMs, ok := val.(float64) + if !ok { + return nil, cerrors.Errorf("%s is not a float", debeziumFieldTimestamp) + } + readAt := time.UnixMilli(int64(tsMs)) + rec.Metadata.SetReadAt(readAt) + case debeziumFieldSource: + // don't add prefix for source fields to be consistent with the + // behavior of the debezium converter in the SDK - it puts all + // metadata fields into the `source` field + source = d.flatten("", val) + default: + flattened := d.flatten("debezium."+field, val) + for k, v := range flattened { + rec.Metadata[k] = v + } + } + } + + // source is added at the end to overwrite any other fields + for k, v := range source { + rec.Metadata[k] = v + } + + return rec.Metadata, nil +} + +func (d *debeziumProcessor) flatten(key string, val any) map[string]string { + var prefix string + if len(key) > 0 { + prefix = key + "." 
+ } + switch val := val.(type) { + case map[string]any: + out := make(map[string]string) + for k1, v1 := range val { + for k2, v2 := range d.flatten(prefix+k1, v1) { + out[k2] = v2 + } + } + return out + case nil: + return nil + case string: + return map[string]string{key: val} + default: + return map[string]string{key: fmt.Sprint(val)} + } +} + +// convertOperation converts debezium operation to openCDC operation +func (d *debeziumProcessor) convertOperation(op string) (opencdc.Operation, error) { + switch op { + case debeziumOpCreate: + return opencdc.OperationCreate, nil + case debeziumOpUpdate: + return opencdc.OperationUpdate, nil + case debeziumOpDelete: + return opencdc.OperationDelete, nil + case debeziumOpRead: + return opencdc.OperationSnapshot, nil + case debeziumOpUnset: + return opencdc.OperationUpdate, nil + } + return 0, cerrors.Errorf("%q is an invalid operation", op) +} + +func (d *debeziumProcessor) unwrapKey(key opencdc.Data) opencdc.Data { + // convert the key to structured data + var structKey opencdc.StructuredData + switch d := key.(type) { + case opencdc.RawData: + // try unmarshalling raw key + err := json.Unmarshal(key.Bytes(), &structKey) + // if key is not json formatted, return the original key + if err != nil { + return key + } + case opencdc.StructuredData: + structKey = d + } + + payload, ok := structKey["payload"] + // return the original key if it doesn't contain a payload + if !ok { + return key + } + + // if payload is a map, return the payload as structured data + if p, ok := payload.(map[string]any); ok { + return opencdc.StructuredData(p) + } + + // otherwise, convert the payload to string, then return it as raw data + return opencdc.RawData(fmt.Sprint(payload)) +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/debezium_examples_test.go b/pkg/plugin/processor/builtin/impl/unwrap/debezium_examples_test.go new file mode 100644 index 000000000..88fef909a --- /dev/null +++ 
b/pkg/plugin/processor/builtin/impl/unwrap/debezium_examples_test.go @@ -0,0 +1,109 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package unwrap + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // we're using a more descriptive name of example +func ExampleDebeziumProcessor() { + p := NewDebeziumProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Unwrap a Debezium record", + Description: `This example how to unwrap a Debezium record from a field nested in a record's +` + ".Payload.After" + ` field. 
It additionally shows how the key is unwrapped, and the metadata merged.`, + Config: map[string]string{ + "field": ".Payload.After.nested", + }, + Have: opencdc.Record{ + Position: opencdc.Position("test-position"), + Operation: opencdc.OperationCreate, + Key: opencdc.RawData(`{"payload":"27"}`), + Metadata: opencdc.Metadata{"metadata-key": "metadata-value"}, + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "nested": `{ + "payload": { + "after": { + "description": "test1", + "id": 27 + }, + "before": null, + "op": "c", + "source": { + "opencdc.readAt": "1674061777225877000", + "opencdc.version": "v1" + }, + "transaction": null, + "ts_ms": 1674061777225 + }, + "schema": {} +}`, + }, + }, + }, + Want: sdk.SingleRecord{ + Position: opencdc.Position("test-position"), + Key: opencdc.RawData("27"), + Operation: opencdc.OperationCreate, + Metadata: opencdc.Metadata{ + "metadata-key": "metadata-value", + "opencdc.readAt": "1674061777225877000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "description": "test1", + "id": float64(27), + }, + }, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,14 +1,17 @@ + // { + // "position": "dGVzdC1wb3NpdGlvbg==", + // "operation": "create", + // "metadata": { + // - "metadata-key": "metadata-value" + // + "metadata-key": "metadata-value", + // - }, + // - "key": "{\"payload\":\"27\"}", + // - "payload": { + // - "before": null, + // - "after": { + // - "nested": "{\n \"payload\": {\n \"after\": {\n \"description\": \"test1\",\n \"id\": 27\n },\n \"before\": null,\n \"op\": \"c\",\n \"source\": {\n \"opencdc.readAt\": \"1674061777225877000\",\n \"opencdc.version\": \"v1\"\n },\n \"transaction\": null,\n \"ts_ms\": 1674061777225\n },\n \"schema\": {}\n}" + // + "opencdc.readAt": "1674061777225877000", + // + "opencdc.version": "v1" + // + }, + // + "key": "27", + // + "payload": { + // + "before": null, + // + "after": { + // 
+ "description": "test1", + // + "id": 27 + // } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/debezium_paramgen.go b/pkg/plugin/processor/builtin/impl/unwrap/debezium_paramgen.go new file mode 100644 index 000000000..097224b54 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/debezium_paramgen.go @@ -0,0 +1,23 @@ +// Code generated by paramgen. DO NOT EDIT. +// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package unwrap + +import ( + "regexp" + + "github.com/conduitio/conduit-commons/config" +) + +func (debeziumConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: ".Payload.After", + Description: "Field is a reference to the field which contains the Debezium record.\n\nFor more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRegex{Regex: regexp.MustCompile("^.Payload")}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/debezium_test.go b/pkg/plugin/processor/builtin/impl/unwrap/debezium_test.go new file mode 100644 index 000000000..0d3bcb226 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/debezium_test.go @@ -0,0 +1,248 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package unwrap + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + "github.com/google/go-cmp/cmp" + "github.com/matryer/is" +) + +func TestDebeziumProcessor_Configure(t *testing.T) { + testCases := []struct { + name string + config map[string]string + wantErr string + }{ + { + name: "optional not provided", + config: map[string]string{}, + wantErr: "", + }, + { + name: "valid field (within .Payload)", + config: map[string]string{"field": ".Payload.After.something"}, + wantErr: "", + }, + { + name: "invalid field", + config: map[string]string{"field": ".Key"}, + wantErr: `invalid configuration: error validating "field": ".Key" should match the regex "^.Payload": regex validation failed`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + err := NewDebeziumProcessor(log.Test(t)).Configure(context.Background(), tc.config) + if tc.wantErr != "" { + is.True(err != nil) + is.Equal(tc.wantErr, err.Error()) + } else { + is.NoErr(err) + } + }) + } +} + +func TestDebeziumProcessor_Process(t *testing.T) { + testCases := []struct { + name string + config map[string]string + record opencdc.Record + want sdk.ProcessedRecord + wantErr string + }{ + { + name: "raw payload", + config: map[string]string{"field": ".Payload.After"}, + record: opencdc.Record{ + Metadata: map[string]string{}, + Key: opencdc.RawData(`{"payload":"27"}`), + Position: []byte("position"), + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData(`{ + "payload": { + "after": { + "description": "test1", + "id": 27 + }, + "before": null, + "op": "c", + "source": { + "opencdc.readAt": "1674061777225877000", + "opencdc.version": "v1" + }, + "transaction": null, + "ts_ms": 1674061777225 + 
}, + "schema": {} + }`), + }, + }, + want: sdk.SingleRecord{ + Position: opencdc.Position("position"), + Key: opencdc.RawData("27"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "opencdc.readAt": "1674061777225877000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{"description": "test1", "id": float64(27)}, + }, + }, + }, + { + name: "structured payload", + config: map[string]string{"field": ".Payload.After"}, + record: opencdc.Record{ + Metadata: map[string]string{ + "conduit.version": "v0.4.0", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "payload": map[string]any{ + "after": map[string]any{ + "description": "test1", + "id": 27, + }, + "before": nil, + "op": "u", + "source": map[string]any{ + "opencdc.version": "v1", + }, + "transaction": nil, + "ts_ms": float64(1674061777225), + }, + "schema": map[string]any{}, + }, + }, + Key: opencdc.StructuredData{ + "payload": 27, + "schema": map[string]any{}, + }, + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationUpdate, + Metadata: map[string]string{ + "opencdc.readAt": "1674061777225000000", + "opencdc.version": "v1", + "conduit.version": "v0.4.0", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{"description": "test1", "id": 27}, + }, + Key: opencdc.RawData("27"), + }, + }, + { + name: "structured data, payload missing", + config: map[string]string{"field": ".Payload.After"}, + record: opencdc.Record{ + Metadata: map[string]string{ + "conduit.version": "v0.4.0", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "foo": "bar", + "schema": map[string]any{}, + }, + }, + Key: opencdc.StructuredData{ + "payload": 27, + "schema": map[string]any{}, + }, + }, + want: sdk.ErrorRecord{ + Error: cerrors.New("data to be unwrapped doesn't contain a payload field"), + }, + }, + { + name: "custom field, structured payload", + config: 
map[string]string{"field": ".Payload.After[\"debezium_event\"]"}, + record: opencdc.Record{ + Metadata: map[string]string{ + "conduit.version": "v0.4.0", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "debezium_event": map[string]any{ + "payload": map[string]any{ + "after": map[string]any{ + "description": "test1", + "id": 27, + }, + "before": nil, + "op": "u", + "source": map[string]any{ + "opencdc.version": "v1", + }, + "transaction": nil, + "ts_ms": float64(1674061777225), + }, + "schema": map[string]any{}, + }, + }, + }, + Key: opencdc.StructuredData{ + "payload": 27, + "schema": map[string]any{}, + }, + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationUpdate, + Metadata: map[string]string{ + "opencdc.readAt": "1674061777225000000", + "opencdc.version": "v1", + "conduit.version": "v0.4.0", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{"description": "test1", "id": 27}, + }, + Key: opencdc.RawData("27"), + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + underTest := NewDebeziumProcessor(log.Test(t)) + err := underTest.Configure(context.Background(), tc.config) + is.NoErr(err) + + gotSlice := underTest.Process(context.Background(), []opencdc.Record{tc.record}) + is.Equal(1, len(gotSlice)) + is.Equal("", cmp.Diff(tc.want, gotSlice[0], internal.CmpProcessedRecordOpts...)) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/examples_exporter_test.go b/pkg/plugin/processor/builtin/impl/unwrap/examples_exporter_test.go new file mode 100644 index 000000000..2342eb469 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/examples_exporter_test.go @@ -0,0 +1,34 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build export_processors + +package unwrap + +import ( + "os" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +func TestMain(m *testing.M) { + code := m.Run() + if code > 0 { + os.Exit(code) + } + + // tests passed, export the processors + exampleutil.ExportProcessors() +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/examples_test.go b/pkg/plugin/processor/builtin/impl/unwrap/examples_test.go new file mode 100644 index 000000000..3108bcaf5 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/examples_test.go @@ -0,0 +1,17 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go test -count=1 -tags export_processors . 
+ +package unwrap diff --git a/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect.go b/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect.go new file mode 100644 index 000000000..59f1c51a6 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect.go @@ -0,0 +1,175 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=kafka_connect_paramgen.go kafkaConnectConfig + +package unwrap + +import ( + "context" + "fmt" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/goccy/go-json" +) + +type kafkaConnectConfig struct { + // Field is a reference to the field which contains the Kafka Connect record. + // + // For more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66. 
+ Field string `json:"field" validate:"regex=^.Payload" default:".Payload.After"` +} + +type kafkaConnectProcessor struct { + sdk.UnimplementedProcessor + + fieldRefRes sdk.ReferenceResolver +} + +func NewKafkaConnectProcessor(log.CtxLogger) sdk.Processor { + return &kafkaConnectProcessor{} +} + +func (u *kafkaConnectProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "unwrap.kafkaconnect", + Summary: "Unwraps a Kafka Connect record from an OpenCDC record.", + Description: `This processor unwraps a Kafka Connect record from the input OpenCDC record. + +The input record's payload is replaced with the Kafka Connect record. + +This is useful in cases where Conduit acts as an intermediary between a Debezium source and a Debezium destination. +In such cases, the Debezium record is set as the OpenCDC record's payload, and needs to be unwrapped for further usage.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: kafkaConnectConfig{}.Parameters(), + }, nil +} + +func (u *kafkaConnectProcessor) Configure(ctx context.Context, m map[string]string) error { + cfg := kafkaConnectConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, cfg.Parameters()) + if err != nil { + return err + } + + rr, err := sdk.NewReferenceResolver(cfg.Field) + if err != nil { + return cerrors.Errorf("invalid reference: %w", err) + } + + u.fieldRefRes = rr + return nil +} + +func (u *kafkaConnectProcessor) Open(context.Context) error { + return nil +} + +func (u *kafkaConnectProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + proc, err := u.processRecord(rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + out = append(out, proc) + } + + return out +} + +func (u *kafkaConnectProcessor) processRecord(rec opencdc.Record) (sdk.ProcessedRecord, error) { + ref, err := u.fieldRefRes.Resolve(&rec) + if err != nil { + 
return nil, cerrors.Errorf("failed resolving reference: %w", err) + } + + var kc opencdc.StructuredData + switch d := ref.Get().(type) { + case opencdc.StructuredData: + kc = d + case map[string]any: + kc = d + case opencdc.RawData: + err := json.Unmarshal(d.Bytes(), &kc) + if err != nil { + return nil, cerrors.Errorf("failed unmarshalling JSON from raw data: %w", err) + } + case string: + err := json.Unmarshal([]byte(d), &kc) + if err != nil { + return nil, cerrors.Errorf("failed unmarshalling JSON from string: %w", err) + } + default: + return nil, cerrors.Errorf("unexpected data type %T (only structured data is supported)", ref.Get()) + } + + // get payload + structPayload, ok := kc["payload"].(map[string]any) + if !ok { + return nil, cerrors.New("referenced record field doesn't contain a payload field") + } + + return sdk.SingleRecord{ + Key: u.unwrapKey(rec.Key), + Position: rec.Position, + Metadata: rec.Metadata, + Payload: opencdc.Change{ + After: opencdc.StructuredData(structPayload), + }, + Operation: opencdc.OperationCreate, + }, nil +} + +// todo same as in debezium +func (u *kafkaConnectProcessor) unwrapKey(key opencdc.Data) opencdc.Data { + // convert the key to structured data + var structKey opencdc.StructuredData + switch d := key.(type) { + case opencdc.RawData: + // try unmarshalling raw key + err := json.Unmarshal(key.Bytes(), &structKey) + // if key is not json formatted, return the original key + if err != nil { + return key + } + case opencdc.StructuredData: + structKey = d + } + + payload, ok := structKey["payload"] + // return the original key if it doesn't contain a payload + if !ok { + return key + } + + // if payload is a map, return the payload as structured data + switch p := payload.(type) { + case opencdc.StructuredData: + return p + case map[string]any: + return opencdc.StructuredData(p) + default: + // otherwise, convert the payload to string, then return it as raw data + return opencdc.RawData(fmt.Sprint(payload)) + } +} + +func (u 
*kafkaConnectProcessor) Teardown(context.Context) error { + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_examples_test.go b/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_examples_test.go new file mode 100644 index 000000000..d359ea3ab --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_examples_test.go @@ -0,0 +1,100 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package unwrap + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // we're using a more descriptive name of example +func ExampleKafkaConnectProcesor() { + p := NewKafkaConnectProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Unwrap a Kafka Connect record", + Description: `This example shows how to unwrap a Kafka Connect record. + +The Kafka Connect record is serialized as a JSON string in the ` + "`.Payload.After`" + ` field (raw data). +The Kafka Connect record's payload will replace the OpenCDC record's payload. + +We also see how the key is unwrapped too. 
In this case, the key comes in as structured data.`, + Config: map[string]string{}, + Have: opencdc.Record{ + Position: opencdc.Position("test position"), + Operation: opencdc.OperationCreate, + Metadata: opencdc.Metadata{ + "metadata-key": "metadata-value", + }, + Key: opencdc.StructuredData{ + "payload": map[string]interface{}{ + "id": 27, + }, + "schema": map[string]interface{}{}, + }, + Payload: opencdc.Change{ + After: opencdc.RawData(`{ +"payload": { + "description": "test2" +}, +"schema": {} +}`), + }, + }, + Want: sdk.SingleRecord{ + Position: opencdc.Position("test position"), + Operation: opencdc.OperationCreate, + Metadata: opencdc.Metadata{ + "metadata-key": "metadata-value", + }, + Key: opencdc.StructuredData{"id": 27}, + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "description": "test2", + }, + }, + }, + }) + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,17 +1,16 @@ + // { + // "position": "dGVzdCBwb3NpdGlvbg==", + // "operation": "create", + // "metadata": { + // "metadata-key": "metadata-value" + // }, + // "key": { + // - "payload": { + // - "id": 27 + // + "id": 27 + // - }, + // + }, + // - "schema": {} + // - }, + // - "payload": { + // + "payload": { + // "before": null, + // - "after": "{\n\"payload\": {\n \"description\": \"test2\"\n},\n\"schema\": {}\n}" + // + "after": { + // + "description": "test2" + // + } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_paramgen.go b/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_paramgen.go new file mode 100644 index 000000000..ea9745a99 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_paramgen.go @@ -0,0 +1,23 @@ +// Code generated by paramgen. DO NOT EDIT. 
+// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package unwrap + +import ( + "regexp" + + "github.com/conduitio/conduit-commons/config" +) + +func (kafkaConnectConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: ".Payload.After", + Description: "Field is a reference to the field which contains the Kafka Connect record.\n\nFor more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRegex{Regex: regexp.MustCompile("^.Payload")}, + }, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_test.go b/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_test.go new file mode 100644 index 000000000..23d4dc215 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/kafka_connect_test.go @@ -0,0 +1,155 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package unwrap + +import ( + "context" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + "github.com/google/go-cmp/cmp" + "github.com/matryer/is" +) + +func TestKafkaConnectProcessor_Configure(t *testing.T) { + testCases := []struct { + name string + config map[string]string + wantErr string + }{ + { + name: "optional parameter not provided", + config: map[string]string{}, + wantErr: "", + }, + { + name: "valid field (within .Payload)", + config: map[string]string{"field": ".Payload.After.something"}, + wantErr: "", + }, + { + name: "invalid field", + config: map[string]string{"field": ".Key"}, + wantErr: `config invalid: error validating "field": ".Key" should match the regex "^.Payload": regex validation failed`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + err := NewKafkaConnectProcessor(log.Test(t)).Configure(context.Background(), tc.config) + if tc.wantErr != "" { + is.True(err != nil) + is.Equal(tc.wantErr, err.Error()) + } else { + is.NoErr(err) + } + }) + } +} +func TestKafkaConnectProcessor_Process(t *testing.T) { + testCases := []struct { + name string + config map[string]string + record opencdc.Record + want sdk.ProcessedRecord + }{ + { + name: "structured payload", + config: map[string]string{}, + record: opencdc.Record{ + Metadata: map[string]string{}, + Payload: opencdc.Change{ + Before: opencdc.StructuredData(nil), + After: opencdc.StructuredData{ + "payload": map[string]any{ + "description": "test2", + "id": 27, + }, + "schema": map[string]any{}, + }, + }, + Key: opencdc.StructuredData{ + "payload": map[string]any{ + "id": 27, + }, + "schema": map[string]any{}, + }, + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationCreate, + Metadata: map[string]string{}, + Payload: opencdc.Change{ + After: 
opencdc.StructuredData{"description": "test2", "id": 27}, + }, + Key: opencdc.StructuredData{"id": 27}, + }, + }, + { + name: "raw payload", + config: map[string]string{}, + record: opencdc.Record{ + Position: opencdc.Position("test position"), + Operation: opencdc.OperationSnapshot, + Metadata: map[string]string{ + "metadata-key": "metadata-value", + }, + Key: opencdc.RawData("key"), + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData(`{ + "payload": { + "description": "test2" + }, + "schema": {} + }`), + }, + }, + want: sdk.SingleRecord{ + Position: opencdc.Position("test position"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "metadata-key": "metadata-value", + }, + Key: opencdc.RawData("key"), + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "description": "test2", + }, + }, + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + ctx := context.Background() + + underTest := NewKafkaConnectProcessor(log.Test(t)) + err := underTest.Configure(ctx, tc.config) + is.NoErr(err) + + got := underTest.Process(ctx, []opencdc.Record{tc.record}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(tc.want, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/opencdc.go b/pkg/plugin/processor/builtin/impl/unwrap/opencdc.go new file mode 100644 index 000000000..691d28479 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/opencdc.go @@ -0,0 +1,269 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=opencdc_paramgen.go openCDCConfig + +package unwrap + +import ( + "context" + "encoding/base64" + "fmt" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/goccy/go-json" +) + +type openCDCConfig struct { + // Field is a reference to the field which contains the OpenCDC record. + // + // For more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66. + Field string `json:"field" default:".Payload.After"` +} + +type openCDCProcessor struct { + sdk.UnimplementedProcessor + + logger log.CtxLogger + fieldRefRes sdk.ReferenceResolver +} + +func NewOpenCDCProcessor(logger log.CtxLogger) sdk.Processor { + return &openCDCProcessor{logger: logger} +} + +func (u *openCDCProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "unwrap.opencdc", + Summary: "A processor that unwraps the OpenCDC record saved in one of record's fields.", + Description: `The unwrap.opencdc processor is useful in situations where a record goes through intermediate +systems before being written to a final destination. In these cases, the original OpenCDC record is part of the payload +read from the intermediate system and needs to be unwrapped before being written. 
+ +Note: if the wrapped OpenCDC record is not in a structured data field, then it's assumed that it's stored in JSON format.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: openCDCConfig{}.Parameters(), + }, nil +} + +func (u *openCDCProcessor) Configure(ctx context.Context, m map[string]string) error { + cfg := openCDCConfig{} + err := sdk.ParseConfig(ctx, m, &cfg, cfg.Parameters()) + if err != nil { + return cerrors.Errorf("failed parsing configuration: %w", err) + } + + rr, err := sdk.NewReferenceResolver(cfg.Field) + if err != nil { + return cerrors.Errorf("invalid reference: %w", err) + } + + u.fieldRefRes = rr + return nil +} + +func (u *openCDCProcessor) Open(context.Context) error { + return nil +} + +func (u *openCDCProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + proc, err := u.processRecord(rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + out = append(out, proc) + } + + return out +} + +func (u *openCDCProcessor) processRecord(rec opencdc.Record) (sdk.ProcessedRecord, error) { + ref, err := u.fieldRefRes.Resolve(&rec) + if err != nil { + return nil, cerrors.Errorf("failed resolving record reference: %w", err) + } + + var data opencdc.StructuredData + switch v := ref.Get().(type) { + case opencdc.RawData: + // unmarshal raw data to structured + if err := json.Unmarshal(v.Bytes(), &data); err != nil { + return nil, cerrors.Errorf("failed to unmarshal raw data as JSON: %w", err) + } + case string: + // unmarshal raw data to structured + if err := json.Unmarshal([]byte(v), &data); err != nil { + return nil, cerrors.Errorf("failed to unmarshal raw data as JSON: %w", err) + } + case opencdc.StructuredData: + data = v + case nil: + return nil, cerrors.New("field to unmarshal is nil") + default: + return nil, cerrors.Errorf("unexpected data type %T", v) + } + + opencdcRec, err := 
u.unmarshalRecord(data) + if err != nil { + return nil, cerrors.Errorf("failed unmarshalling record: %w", err) + } + // Position is the only field we preserve from the wrapping record, so that acknowledgments still reference + // the record read from the intermediate system when chaining pipelines (e.g.: source -> kafka, kafka -> destination) + opencdcRec.Position = rec.Position + + return sdk.SingleRecord(opencdcRec), nil +} + +func (u *openCDCProcessor) Teardown(context.Context) error { + return nil +} + +func (u *openCDCProcessor) unmarshalRecord(structData opencdc.StructuredData) (opencdc.Record, error) { + operation, err := u.unmarshalOperation(structData) + if err != nil { + return opencdc.Record{}, cerrors.Errorf("failed unmarshalling operation: %w", err) + } + + metadata, err := u.unmarshalMetadata(structData) + if err != nil { + return opencdc.Record{}, cerrors.Errorf("failed unmarshalling metadata: %w", err) + } + + key, err := u.convertData(structData, "key") + if err != nil { + return opencdc.Record{}, cerrors.Errorf("failed unmarshalling key: %w", err) + } + + payload, err := u.unmarshalPayload(structData) + if err != nil { + return opencdc.Record{}, cerrors.Errorf("failed unmarshalling payload: %w", err) + } + + return opencdc.Record{ + Key: key, + Metadata: metadata, + Payload: payload, + Operation: operation, + }, nil +} + +// unmarshalOperation extracts operation from a structuredData record. 
+func (u *openCDCProcessor) unmarshalOperation(structData opencdc.StructuredData) (opencdc.Operation, error) { + var operation opencdc.Operation + op, ok := structData["operation"] + if !ok { + return operation, cerrors.New("no operation") + } + + switch opType := op.(type) { + case opencdc.Operation: + operation = opType + case string: + if err := operation.UnmarshalText([]byte(opType)); err != nil { + return operation, cerrors.Errorf("invalid operation %q", opType) + } + default: + return operation, cerrors.Errorf("expected a opencdc.Operation or a string, got %T", opType) + } + return operation, nil +} + +// unmarshalMetadata extracts metadata from a structuredData record. +func (u *openCDCProcessor) unmarshalMetadata(structData opencdc.StructuredData) (opencdc.Metadata, error) { + var metadata opencdc.Metadata + meta, ok := structData["metadata"] + if !ok { + return nil, cerrors.New("no metadata") + } + + switch m := meta.(type) { + case opencdc.Metadata: + metadata = m + case map[string]interface{}: + metadata = make(opencdc.Metadata, len(m)) + for k, v := range m { + // if it's already a string, then fmt.Sprint() will be slower + if str, ok := v.(string); ok { + metadata[k] = str + } else { + metadata[k] = fmt.Sprint(v) + } + } + default: + return nil, cerrors.Errorf("expected a opencdc.Metadata or a map[string]interface{}, got %T", m) + } + + return metadata, nil +} + +func (u *openCDCProcessor) convertData(m map[string]interface{}, key string) (opencdc.Data, error) { + data, ok := m[key] + if !ok || data == nil { + return nil, nil + } + + switch d := data.(type) { + case opencdc.Data: + return d, nil + case map[string]interface{}: + return opencdc.StructuredData(d), nil + case string: + decoded := make([]byte, base64.StdEncoding.DecodedLen(len(d))) + n, err := base64.StdEncoding.Decode(decoded, []byte(d)) + if err != nil { + return nil, cerrors.Errorf("couldn't decode payload %q: %w", key, err) + } + return opencdc.RawData(decoded[:n]), nil + default: + 
return nil, cerrors.Errorf("expected a map[string]interface{} or string, got: %T", d) + } +} + +// unmarshalPayload extracts payload from a structuredData record. +func (u *openCDCProcessor) unmarshalPayload(structData opencdc.StructuredData) (opencdc.Change, error) { + var payload opencdc.Change + pl, ok := structData["payload"] + if !ok { + return payload, cerrors.New("no payload") + } + + switch p := pl.(type) { + case opencdc.Change: + payload = p + case map[string]interface{}: + before, err := u.convertData(p, "before") + if err != nil { + return opencdc.Change{}, err + } + + after, err := u.convertData(p, "after") + if err != nil { + return opencdc.Change{}, err + } + + payload = opencdc.Change{ + Before: before, + After: after, + } + default: + return opencdc.Change{}, cerrors.Errorf("expected a opencdc.Change or a map[string]interface{}, got %T", p) + } + return payload, nil +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/opencdc_examples_test.go b/pkg/plugin/processor/builtin/impl/unwrap/opencdc_examples_test.go new file mode 100644 index 000000000..a0f9b6e78 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/opencdc_examples_test.go @@ -0,0 +1,111 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package unwrap + +import ( + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +//nolint:govet // we're using a more descriptive name of example +func ExampleOpenCDCProcessor() { + p := NewOpenCDCProcessor(log.Nop()) + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: "Unwrap an OpenCDC record", + Description: "In this example we use the `unwrap.opencdc` processor to unwrap the OpenCDC record found in the " + + "record's `.Payload.After` field.", + Config: map[string]string{}, + Have: opencdc.Record{ + Position: opencdc.Position("wrapping position"), + Key: opencdc.RawData("wrapping key"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{}, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "position": opencdc.Position("test-position"), + "operation": opencdc.OperationUpdate, + "key": map[string]interface{}{ + "id": "test-key", + }, + "metadata": opencdc.Metadata{}, + "payload": opencdc.Change{ + After: opencdc.StructuredData{ + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "sensor_id": 1250383582, + "triggered": false, + }, + }, + }, + }, + }, + Want: sdk.SingleRecord{ + Position: opencdc.Position("wrapping position"), + Operation: opencdc.OperationUpdate, + Key: opencdc.StructuredData{ + "id": "test-key", + }, + Metadata: opencdc.Metadata{}, + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "sensor_id": 1250383582, + "triggered": false, + }, + }, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,25 +1,16 @@ + // { + // "position": "d3JhcHBpbmcgcG9zaXRpb24=", + // - "operation": "create", + // + "operation": "update", + // "metadata": {}, + // - "key": "wrapping key", + // + "key": { + // + 
"id": "test-key" + // + }, + // "payload": { + // "before": null, + // "after": { + // - "key": { + // - "id": "test-key" + // - }, + // - "metadata": {}, + // - "operation": "update", + // - "payload": { + // - "before": null, + // - "after": { + // - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + // - "sensor_id": 1250383582, + // - "triggered": false + // - } + // - }, + // + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + // + "sensor_id": 1250383582, + // - "position": "dGVzdC1wb3NpdGlvbg==" + // + "triggered": false + // } + // } + // } +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/opencdc_paramgen.go b/pkg/plugin/processor/builtin/impl/unwrap/opencdc_paramgen.go new file mode 100644 index 000000000..058e32b38 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/opencdc_paramgen.go @@ -0,0 +1,19 @@ +// Code generated by paramgen. DO NOT EDIT. +// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package unwrap + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (openCDCConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "field": { + Default: ".Payload.After", + Description: "Field is a reference to the field which contains the OpenCDC record.\n\nFor more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/unwrap/opencdc_test.go b/pkg/plugin/processor/builtin/impl/unwrap/opencdc_test.go new file mode 100644 index 000000000..c04dd784e --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/unwrap/opencdc_test.go @@ -0,0 +1,598 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package unwrap + +import ( + "context" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/google/go-cmp/cmp" + "github.com/matryer/is" +) + +const RecordUpdateWithBefore = `{ + "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", + "operation": "update", + "metadata": { + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1" + }, + "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", + "payload": { + "before": { + "event_id": 1747353650, + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": 1250383582, + "triggered": false + }, + "after": { + "event_id": 1747353658, + "msg": "string 0f5397c9-31f1-422a-9c9a-26e3574a5c31", + "pg_generator": false, + "sensor_id": 1250383580, + "triggered": false + } + } + }` + +const RecordUpdateNoBefore = `{ + "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", + "operation": "update", + "metadata": { + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1" + }, + "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", + "payload": { + "before": null, + "after": { + "event_id": 1747353650, + "msg": "string 
0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": 1250383582, + "triggered": false + } + } + }` + +const RecordDeleteNoBefore = `{ + "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", + "operation": "delete", + "metadata": { + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1" + }, + "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", + "payload": { + "before": null, + "after": null + } + }` + +const RecordDeleteWithBefore = `{ + "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", + "operation": "delete", + "metadata": { + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1" + }, + "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", + "payload": { + "before": { + "event_id": 1747353650, + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": 1250383582, + "triggered": false + }, + "after": null + } + }` + +const RecordCreate = `{ + "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", + "operation": "create", + "metadata": { + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1" + }, + "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", + "payload": { + "before": null, + "after": { + "event_id": 1747353650, + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": 1250383582, + "triggered": false + } + } + }` + +func TestUnwrapOpenCDC_Configure(t *testing.T) { + testCases := []struct { + name string + in map[string]string + wantErr string + }{ + { + name: "invalid field", + in: map[string]string{"field": ".Payload.Something"}, + wantErr: `invalid reference: invalid reference ".Payload.Something": unexpected field "Something": cannot 
resolve reference`, + }, + { + name: "valid field", + in: map[string]string{"field": ".Payload.Before"}, + wantErr: "", + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + underTest := NewOpenCDCProcessor(log.Test(t)) + gotErr := underTest.Configure(context.Background(), tc.in) + if tc.wantErr == "" { + is.NoErr(gotErr) + } else { + is.True(gotErr != nil) + is.Equal(tc.wantErr, gotErr.Error()) + } + }) + } +} + +func TestUnwrapOpenCDC_Process(t *testing.T) { + tests := []struct { + name string + record opencdc.Record + want sdk.ProcessedRecord + config map[string]string + }{ + { + name: "create with structured data and no payload after", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{}, + Payload: opencdc.Change{}, + Position: []byte("a position"), + }, + want: sdk.ErrorRecord{Error: cerrors.New("field to unmarshal is nil")}, + }, + { + name: "create with an invalid operation", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData(`{ + "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", + "operation": "foobar", + "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", + "payload": { + "after": { + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d" + } + } + }`, + ), + }, + Position: []byte("test position"), + }, + want: sdk.ErrorRecord{ + Error: cerrors.New("failed unmarshalling record: failed unmarshalling operation: invalid operation \"foobar\""), + }, + }, + { + name: "create with an invalid metadata", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Payload: opencdc.Change{ + After: opencdc.RawData(`{ + "position": 
"NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", + "operation": "create", + "metadata": "invalid", + "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", + "payload": { + "after": { + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d" + } + } + }`), + }, + Position: []byte("test-position"), + }, + want: sdk.ErrorRecord{ + Error: cerrors.New("failed unmarshalling record: failed unmarshalling metadata: expected a opencdc.Metadata or a map[string]interface{}, got string"), + }, + }, + { + name: "create with an invalid key", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Payload: opencdc.Change{ + After: opencdc.RawData(`{ + "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", + "operation": "create", + "metadata": {}, + "key": 1, + "payload": { + "after": { + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d" + } + } + }`), + }, + Position: []byte("test-pos"), + }, + want: sdk.ErrorRecord{ + Error: cerrors.New("failed unmarshalling record: failed unmarshalling key: expected a map[string]interface{} or string, got: float64"), + }, + }, + { + name: "create with an invalid payload", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData("this is not a json string"), + }, + Position: []byte("test-pos"), + }, + want: sdk.ErrorRecord{Error: cerrors.New("failed to unmarshal raw data as JSON: expected { character for map value")}, + }, + { + name: "create with structured data", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", + "kafka.topic": "stream-78lpnchx7tzpyqz-generator", + "opencdc.createdAt": "1706028953595000000", + 
"opencdc.readAt": "1706028953606997000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "position": []byte("NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0"), + "operation": opencdc.OperationCreate, + "metadata": opencdc.Metadata{ + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1", + }, + "key": map[string]interface{}{ + "id": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", + }, + "payload": opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "event_id": 1747353650, + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": 1250383582, + "triggered": false, + }, + }, + }, + }, + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationCreate, + Metadata: opencdc.Metadata{ + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "event_id": 1747353650, + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": 1250383582, + "triggered": false, + }, + }, + Key: opencdc.StructuredData{"id": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh"}, + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + }, + { + name: "create with raw data", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "conduit.source.connector.id": 
"dest-log-78lpnchx7tzpyqz:source-kafka", + "kafka.topic": "stream-78lpnchx7tzpyqz-generator", + "opencdc.createdAt": "1706028953595000000", + "opencdc.readAt": "1706028953606997000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData(RecordCreate), + }, + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationCreate, + Metadata: opencdc.Metadata{ + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "event_id": float64(1747353650), + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": float64(1250383582), + "triggered": false, + }, + }, + Key: opencdc.RawData("17774941-57a2-42fa-b430-8912a9424b3a"), + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + }, + { + name: "delete with before and with raw data", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", + "kafka.topic": "stream-78lpnchx7tzpyqz-generator", + "opencdc.createdAt": "1706028953595000000", + "opencdc.readAt": "1706028953606997000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData(RecordDeleteWithBefore), + }, + Position: 
[]byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationDelete, + Metadata: opencdc.Metadata{ + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "event_id": float64(1747353650), + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": float64(1250383582), + "triggered": false, + }, + After: nil, + }, + Key: opencdc.RawData("17774941-57a2-42fa-b430-8912a9424b3a"), + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + }, + { + name: "delete without before and with raw data", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", + "kafka.topic": "stream-78lpnchx7tzpyqz-generator", + "opencdc.createdAt": "1706028953595000000", + "opencdc.readAt": "1706028953606997000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData(RecordDeleteNoBefore), + }, + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationDelete, + Metadata: opencdc.Metadata{ + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: nil, + 
}, + Key: opencdc.RawData("17774941-57a2-42fa-b430-8912a9424b3a"), + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + }, + { + name: "update with before and with raw data", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", + "kafka.topic": "stream-78lpnchx7tzpyqz-generator", + "opencdc.createdAt": "1706028953595000000", + "opencdc.readAt": "1706028953606997000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData(RecordUpdateWithBefore), + }, + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationUpdate, + Metadata: opencdc.Metadata{ + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "event_id": float64(1747353650), + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": float64(1250383582), + "triggered": false, + }, + After: opencdc.StructuredData{ + "event_id": float64(1.747353658e+09), + "msg": "string 0f5397c9-31f1-422a-9c9a-26e3574a5c31", + "pg_generator": false, + "sensor_id": float64(1.25038358e+09), + "triggered": false, + }, + }, + Key: opencdc.RawData("17774941-57a2-42fa-b430-8912a9424b3a"), + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + }, + { + name: 
"update without before and with raw data", + config: map[string]string{}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", + "kafka.topic": "stream-78lpnchx7tzpyqz-generator", + "opencdc.createdAt": "1706028953595000000", + "opencdc.readAt": "1706028953606997000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.RawData(RecordUpdateNoBefore), + }, + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationUpdate, + Metadata: opencdc.Metadata{ + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "event_id": float64(1747353650), + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": float64(1250383582), + "triggered": false, + }, + }, + Key: opencdc.RawData("17774941-57a2-42fa-b430-8912a9424b3a"), + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + }, + { + name: "update without before and with raw data in nested field", + config: map[string]string{"field": ".Payload.After.nested"}, + record: opencdc.Record{ + Key: opencdc.RawData("one-key-raw-data"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", + "kafka.topic": "stream-78lpnchx7tzpyqz-generator", + "opencdc.createdAt": "1706028953595000000", + "opencdc.readAt": "1706028953606997000", + 
"opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "nested": opencdc.RawData(RecordUpdateNoBefore), + }, + }, + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + want: sdk.SingleRecord{ + Operation: opencdc.OperationUpdate, + Metadata: opencdc.Metadata{ + "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", + "opencdc.readAt": "1706028953595546000", + "opencdc.version": "v1", + }, + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "event_id": float64(1747353650), + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "pg_generator": false, + "sensor_id": float64(1250383582), + "triggered": false, + }, + }, + Key: opencdc.RawData("17774941-57a2-42fa-b430-8912a9424b3a"), + Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + is := is.New(t) + ctx := context.Background() + + underTest := NewOpenCDCProcessor(log.Test(t)) + err := underTest.Configure(ctx, tt.config) + is.NoErr(err) + + got := underTest.Process(ctx, []opencdc.Record{tt.record}) + is.Equal(1, len(got)) + is.Equal("", cmp.Diff(tt.want, got[0], internal.CmpProcessedRecordOpts...)) + }) + } +} diff --git a/pkg/plugin/processor/builtin/impl/webhook/examples_exporter_test.go b/pkg/plugin/processor/builtin/impl/webhook/examples_exporter_test.go new file mode 100644 index 000000000..151f308e5 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/webhook/examples_exporter_test.go @@ -0,0 +1,34 @@ +// Copyright © 2024 Meroxa, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build export_processors + +package webhook + +import ( + "os" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" +) + +func TestMain(m *testing.M) { + code := m.Run() + if code > 0 { + os.Exit(code) + } + + // tests passed, export the processors + exampleutil.ExportProcessors() +} diff --git a/pkg/plugin/processor/builtin/impl/webhook/examples_test.go b/pkg/plugin/processor/builtin/impl/webhook/examples_test.go new file mode 100644 index 000000000..a68cd1cf3 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/webhook/examples_test.go @@ -0,0 +1,17 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate go test -count=1 -tags export_processors . 
+ +package webhook diff --git a/pkg/plugin/processor/builtin/impl/webhook/http.go b/pkg/plugin/processor/builtin/impl/webhook/http.go new file mode 100644 index 000000000..4501fcf46 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/webhook/http.go @@ -0,0 +1,305 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate paramgen -output=http_paramgen.go httpConfig + +package webhook + +import ( + "bytes" + "context" + "io" + "net/http" + "strconv" + "time" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/goccy/go-json" + "github.com/jpillora/backoff" +) + +type httpConfig struct { + // URL used in the HTTP request. + URL string `json:"request.url" validate:"required"` + // Method is the HTTP request method to be used. + Method string `json:"request.method" default:"POST"` + // ContentType is the value of the Content-Type header. + ContentType string `json:"request.contentType" default:"application/json"` + + // BackoffRetryCount is the maximum number of retries for an individual record + // when backing off following an error. + BackoffRetryCount float64 `json:"backoffRetry.count" default:"0" validate:"gt=-1"` + // BackoffRetryFactor is the multiplying factor for each increment step. 
+ BackoffRetryFactor float64 `json:"backoffRetry.factor" default:"2" validate:"gt=0"` + // BackoffRetryMin is the minimum waiting time before retrying. + BackoffRetryMin time.Duration `json:"backoffRetry.min" default:"100ms"` + // BackoffRetryMax is the maximum waiting time before retrying. + BackoffRetryMax time.Duration `json:"backoffRetry.max" default:"5s"` + + // RequestBodyRef specifies which field from the input record + // should be used as the body in the HTTP request. + // The value of this parameter should be a valid record field reference: + // See: sdk.NewReferenceResolver + RequestBodyRef string `json:"request.body" default:"."` + // ResponseBodyRef specifies to which field should the + // response body be saved to. + // The value of this parameter should be a valid record field reference: + // See: sdk.NewReferenceResolver + ResponseBodyRef string `json:"response.body" default:".Payload.After"` + // ResponseStatusRef specifies to which field should the + // response status be saved to. + // The value of this parameter should be a valid record field reference. + // If no value is set, then the response status will NOT be saved. 
+ // See: sdk.NewReferenceResolver + ResponseStatusRef string `json:"response.status"` +} + +type httpProcessor struct { + sdk.UnimplementedProcessor + + logger log.CtxLogger + + config httpConfig + backoffCfg *backoff.Backoff + + requestBodyRef *sdk.ReferenceResolver + responseBodyRef *sdk.ReferenceResolver + responseStatusRef *sdk.ReferenceResolver +} + +func NewHTTPProcessor(l log.CtxLogger) sdk.Processor { + return &httpProcessor{logger: l.WithComponent("webhook.httpProcessor")} +} + +func (p *httpProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{ + Name: "webhook.http", + Summary: "HTTP webhook processor", + Description: `A processor that sends an HTTP request to the specified URL, retries on error and +saves the response body and, optionally, the response status.`, + Version: "v0.1.0", + Author: "Meroxa, Inc.", + Parameters: httpConfig{}.Parameters(), + }, nil +} + +func (p *httpProcessor) Configure(ctx context.Context, m map[string]string) error { + err := sdk.ParseConfig(ctx, m, &p.config, p.config.Parameters()) + if err != nil { + return cerrors.Errorf("failed parsing configuration: %w", err) + } + + if p.config.ResponseBodyRef == p.config.ResponseStatusRef { + return cerrors.New("invalid configuration: response.body and response.status set to same field") + } + + requestBodyRef, err := sdk.NewReferenceResolver(p.config.RequestBodyRef) + if err != nil { + return cerrors.Errorf("failed parsing request.body %v: %w", p.config.RequestBodyRef, err) + } + p.requestBodyRef = &requestBodyRef + + responseBodyRef, err := sdk.NewReferenceResolver(p.config.ResponseBodyRef) + if err != nil { + return cerrors.Errorf("failed parsing response.body %v: %w", p.config.ResponseBodyRef, err) + } + p.responseBodyRef = &responseBodyRef + + // This field is optional and, if not set, response status won't be saved. 
+ if p.config.ResponseStatusRef != "" { + responseStatusRef, err := sdk.NewReferenceResolver(p.config.ResponseStatusRef) + if err != nil { + return cerrors.Errorf("failed parsing response.status %v: %w", p.config.ResponseStatusRef, err) + } + p.responseStatusRef = &responseStatusRef + } + + // preflight check + _, err = http.NewRequest(p.config.Method, p.config.URL, bytes.NewReader([]byte{})) + if err != nil { + return cerrors.Errorf("configuration check failed: %w", err) + } + + p.backoffCfg = &backoff.Backoff{ + Factor: p.config.BackoffRetryFactor, + Min: p.config.BackoffRetryMin, + Max: p.config.BackoffRetryMax, + } + return nil +} + +func (p *httpProcessor) Open(context.Context) error { + return nil +} + +func (p *httpProcessor) Process(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(records)) + for _, rec := range records { + proc, err := p.processRecordWithBackOff(ctx, rec) + if err != nil { + return append(out, sdk.ErrorRecord{Error: err}) + } + out = append(out, proc) + } + + return out +} + +func (p *httpProcessor) processRecordWithBackOff(ctx context.Context, r opencdc.Record) (sdk.ProcessedRecord, error) { + for { + processed, err := p.processRecord(ctx, r) + attempt := p.backoffCfg.Attempt() + duration := p.backoffCfg.Duration() + + if err != nil && attempt < p.config.BackoffRetryCount { + p.logger.Debug(ctx). + Err(err). + Float64("attempt", attempt). + Float64("backoffRetry.count", p.config.BackoffRetryCount). + Int64("backoffRetry.duration", duration.Milliseconds()). 
+ Msg("retrying HTTP request") + + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-time.After(duration): + continue + } + } + p.backoffCfg.Reset() // reset for next processor execution + if err != nil { + return nil, err + } + + return processed, nil + } +} + +// processRecord processes a single record (without retries) +func (p *httpProcessor) processRecord(ctx context.Context, r opencdc.Record) (sdk.ProcessedRecord, error) { + var key []byte + if r.Key != nil { + key = r.Key.Bytes() + } + p.logger.Trace(ctx).Bytes("record_key", key).Msg("processing record") + + req, err := p.buildRequest(ctx, r) + if err != nil { + return nil, cerrors.Errorf("cannot create HTTP request: %w", err) + } + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return nil, cerrors.Errorf("error executing HTTP request: %w", err) + } + defer func() { + errClose := resp.Body.Close() + if errClose != nil { + p.logger.Debug(ctx). + Err(errClose). + Msg("failed closing response body (possible resource leak)") + } + }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, cerrors.Errorf("error reading response body: %w", err) + } + + if resp.StatusCode >= 300 { + // regard status codes over 299 as errors + return nil, cerrors.Errorf("error status code %v (body: %q)", resp.StatusCode, string(body)) + } + // skip if body has no content + if resp.StatusCode == http.StatusNoContent { + return sdk.FilterRecord{}, nil + } + + // Set response body + err = p.setField(&r, p.responseBodyRef, opencdc.RawData(body)) + if err != nil { + return nil, cerrors.Errorf("failed setting response body: %w", err) + } + err = p.setField(&r, p.responseStatusRef, strconv.Itoa(resp.StatusCode)) + if err != nil { + return nil, cerrors.Errorf("failed setting response status: %w", err) + } + + return sdk.SingleRecord(r), nil +} + +func (p *httpProcessor) buildRequest(ctx context.Context, r opencdc.Record) (*http.Request, error) { + reqBody, err := p.requestBody(r) + if err != nil { 
+ return nil, cerrors.Errorf("failed getting request body: %w", err) + } + + req, err := http.NewRequestWithContext( + ctx, + p.config.Method, + p.config.URL, + bytes.NewReader(reqBody), + ) + if err != nil { + return nil, cerrors.Errorf("error creating HTTP request: %w", err) + } + + // todo make it possible to add more headers, e.g. auth headers etc. + req.Header.Set("Content-Type", p.config.ContentType) + + return req, nil +} + +// requestBody returns the request body for the given record, +// using the configured field reference (see: request.body configuration parameter). +func (p *httpProcessor) requestBody(r opencdc.Record) ([]byte, error) { + ref, err := p.requestBodyRef.Resolve(&r) + if err != nil { + return nil, cerrors.Errorf("failed resolving request.body: %w", err) + } + + val := ref.Get() + // Raw byte data should be sent as it is, as that's most often what we want + // If we json.Marshal it first, it will be Base64-encoded. + if raw, ok := val.(opencdc.RawData); ok { + return raw.Bytes(), nil + } + + return json.Marshal(val) +} + +func (p *httpProcessor) setField(r *opencdc.Record, refRes *sdk.ReferenceResolver, data any) error { + if refRes == nil { + return nil + } + + ref, err := refRes.Resolve(r) + if err != nil { + return err + } + + err = ref.Set(data) + if err != nil { + return err + } + + return nil +} + +func (p *httpProcessor) Teardown(context.Context) error { + return nil +} diff --git a/pkg/plugin/processor/builtin/impl/webhook/http_examples_test.go b/pkg/plugin/processor/builtin/impl/webhook/http_examples_test.go new file mode 100644 index 000000000..fde03fcc0 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/webhook/http_examples_test.go @@ -0,0 +1,108 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package webhook + +import ( + "io" + "log" + "net" + "net/http" + "net/http/httptest" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/exampleutil" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + conduit_log "github.com/conduitio/conduit/pkg/foundation/log" +) + +//nolint:govet // we're using a more descriptive name of example +func ExampleHTTPProcessor() { + p := NewHTTPProcessor(conduit_log.Nop()) + + srv := newTestServer() + // Stop the server on return from the function. + defer srv.Close() + + exampleutil.RunExample(p, exampleutil.Example{ + Summary: `Send a request to an HTTP server`, + Description: ` +This example shows how to use the HTTP processor to send a record's ` + ".Payload.After" + ` field to a dummy HTTP server +that replies back with a greeting. + +The record's ` + ".Payload.After" + ` is overwritten with the response. 
Additionally, the example shows how to store the +value of the HTTP response's code in the metadata field ` + "http_status" + `.`, + Config: map[string]string{ + "request.url": srv.URL, + "request.body": ".Payload.After", + "response.status": `.Metadata["http_status"]`, + }, + Have: opencdc.Record{ + Payload: opencdc.Change{ + After: opencdc.RawData("world"), + }, + }, + Want: sdk.SingleRecord{ + Metadata: map[string]string{ + "http_status": "200", + }, + Payload: opencdc.Change{ + After: opencdc.RawData("hello, world"), + }, + }, + }) + + // Output: + // processor transformed record: + // --- before + // +++ after + // @@ -1,10 +1,12 @@ + // { + // "position": null, + // "operation": "Operation(0)", + // - "metadata": null, + // + "metadata": { + // + "http_status": "200" + // + }, + // "key": null, + // "payload": { + // "before": null, + // - "after": "world" + // + "after": "hello, world" + // } + // } +} + +func newTestServer() *httptest.Server { + l, err := net.Listen("tcp", "127.0.0.1:54321") + if err != nil { + log.Fatalf("failed starting test server on port 54321: %v", err) + } + + srv := httptest.NewUnstartedServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + body, _ := io.ReadAll(req.Body) + _, _ = resp.Write([]byte("hello, " + string(body))) + })) + + // NewUnstartedServer creates a listener. Close that listener and replace + // with the one we created. + srv.Listener.Close() + srv.Listener = l + + // Start the server. + srv.Start() + + return srv +} diff --git a/pkg/plugin/processor/builtin/impl/webhook/http_paramgen.go b/pkg/plugin/processor/builtin/impl/webhook/http_paramgen.go new file mode 100644 index 000000000..55a8cf4d3 --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/webhook/http_paramgen.go @@ -0,0 +1,79 @@ +// Code generated by paramgen. DO NOT EDIT. 
+// Source: github.com/ConduitIO/conduit-commons/tree/main/paramgen + +package webhook + +import ( + "github.com/conduitio/conduit-commons/config" +) + +func (httpConfig) Parameters() map[string]config.Parameter { + return map[string]config.Parameter{ + "backoffRetry.count": { + Default: "0", + Description: "BackoffRetryCount is the maximum number of retries for an individual record\nwhen backing off following an error.", + Type: config.ParameterTypeFloat, + Validations: []config.Validation{ + config.ValidationGreaterThan{V: -1}, + }, + }, + "backoffRetry.factor": { + Default: "2", + Description: "BackoffRetryFactor is the multiplying factor for each increment step.", + Type: config.ParameterTypeFloat, + Validations: []config.Validation{ + config.ValidationGreaterThan{V: 0}, + }, + }, + "backoffRetry.max": { + Default: "5s", + Description: "BackoffRetryMax is the maximum waiting time before retrying.", + Type: config.ParameterTypeDuration, + Validations: []config.Validation{}, + }, + "backoffRetry.min": { + Default: "100ms", + Description: "BackoffRetryMin is the minimum waiting time before retrying.", + Type: config.ParameterTypeDuration, + Validations: []config.Validation{}, + }, + "request.body": { + Default: ".", + Description: "RequestBodyRef specifies which field from the input record\nshould be used as the body in the HTTP request.\nThe value of this parameter should be a valid record field reference:\nSee: sdk.NewReferenceResolver", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "request.contentType": { + Default: "application/json", + Description: "ContentType is the value of the Content-Type header.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "request.method": { + Default: "POST", + Description: "Method is the HTTP request method to be used.", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "request.url": { + Default: "", + Description: "URL used in 
the HTTP request.", + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + "response.body": { + Default: ".Payload.After", + Description: "ResponseBodyRef specifies to which field should the\nresponse body be saved to.\nThe value of this parameter should be a valid record field reference:\nSee: sdk.NewReferenceResolver", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + "response.status": { + Default: "", + Description: "ResponseStatusRef specifies to which field should the\nresponse status be saved to.\nThe value of this parameter should be a valid record field reference.\nIf no value is set, then the response status will NOT be saved.\nSee: sdk.NewReferenceResolver", + Type: config.ParameterTypeString, + Validations: []config.Validation{}, + }, + } +} diff --git a/pkg/plugin/processor/builtin/impl/webhook/http_test.go b/pkg/plugin/processor/builtin/impl/webhook/http_test.go new file mode 100644 index 000000000..cb3c7887f --- /dev/null +++ b/pkg/plugin/processor/builtin/impl/webhook/http_test.go @@ -0,0 +1,489 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package webhook + +import ( + "context" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/goccy/go-json" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + + "github.com/matryer/is" +) + +func TestHTTPProcessor_Configure(t *testing.T) { + tests := []struct { + name string + config map[string]string + wantErr string + }{ + { + name: "empty config returns error", + config: map[string]string{}, + wantErr: `failed parsing configuration: config invalid: error validating "request.url": required parameter is not provided`, + }, + { + name: "empty url returns error", + config: map[string]string{ + "request.url": "", + }, + wantErr: `failed parsing configuration: config invalid: error validating "request.url": required parameter is not provided`, + }, + { + name: "invalid url returns error", + config: map[string]string{ + "request.url": ":not/a/valid/url", + }, + wantErr: "configuration check failed: parse \":not/a/valid/url\": missing protocol scheme", + }, + { + name: "invalid method returns error", + config: map[string]string{ + "request.url": "http://example.com", + "request.method": ":foo", + }, + wantErr: "configuration check failed: net/http: invalid method \":foo\"", + }, + { + name: "invalid backoffRetry.count returns error", + config: map[string]string{ + "request.url": "http://example.com", + "backoffRetry.count": "not-a-number", + }, + wantErr: `failed parsing configuration: config invalid: error validating "backoffRetry.count": "not-a-number" value is not a float: invalid parameter type`, + }, + { + name: "invalid backoffRetry.min returns error", + config: map[string]string{ + "request.url": "http://example.com", + "backoffRetry.count": "1", + "backoffRetry.min": "not-a-duration", + }, + wantErr: `failed parsing configuration: config invalid: error validating 
"backoffRetry.min": "not-a-duration" value is not a duration: invalid parameter type`, + }, + { + name: "invalid backoffRetry.max returns error", + config: map[string]string{ + "request.url": "http://example.com", + "backoffRetry.count": "1", + "backoffRetry.max": "not-a-duration", + }, + wantErr: `failed parsing configuration: config invalid: error validating "backoffRetry.max": "not-a-duration" value is not a duration: invalid parameter type`, + }, + { + name: "invalid backoffRetry.factor returns error", + config: map[string]string{ + "request.url": "http://example.com", + "backoffRetry.count": "1", + "backoffRetry.factor": "not-a-number", + }, + wantErr: `failed parsing configuration: config invalid: error validating "backoffRetry.factor": "not-a-number" value is not a float: invalid parameter type`, + }, + { + name: "valid url returns processor", + config: map[string]string{ + "request.url": "http://example.com", + }, + wantErr: "", + }, + { + name: "valid url and method returns processor", + config: map[string]string{ + "request.url": "http://example.com", + "request.method": "GET", + }, + wantErr: "", + }, + { + name: "valid url, method and backoff retry config returns processor", + config: map[string]string{ + "request.url": "http://example.com", + "request.contentType": "application/json", + "backoffRetry.count": "1", + "backoffRetry.min": "10ms", + "backoffRetry.max": "1s", + "backoffRetry.factor": "1.3", + }, + wantErr: "", + }, + { + name: "invalid: same value of response.body and response.status", + config: map[string]string{ + "request.url": "http://example.com", + "response.body": ".Payload.After", + "response.status": ".Payload.After", + }, + wantErr: "invalid configuration: response.body and response.status set to same field", + }, + { + name: "valid response.body and response.status", + config: map[string]string{ + "request.url": "http://example.com", + "response.body": ".Payload.After", + "response.status": `.Metadata["response.status"]`, + }, + 
wantErr: "", + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + underTest := NewHTTPProcessor(log.Test(t)) + err := underTest.Configure(context.Background(), tc.config) + if tc.wantErr == "" { + is.NoErr(err) + } else { + is.True(err != nil) + is.Equal(tc.wantErr, err.Error()) + } + }) + } +} + +func TestHTTPProcessor_Success(t *testing.T) { + respBody := []byte("foo-bar/response") + + tests := []struct { + name string + config map[string]string + args []opencdc.Record + want []sdk.ProcessedRecord + }{ + { + name: "structured data", + config: map[string]string{"request.method": "GET"}, + args: []opencdc.Record{{ + Payload: opencdc.Change{ + Before: nil, + After: opencdc.StructuredData{ + "bar": 123, + "baz": nil, + }, + }, + }}, + want: []sdk.ProcessedRecord{sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.RawData(respBody), + }, + }, + }, + }, + { + name: "raw data", + config: map[string]string{}, + args: []opencdc.Record{{ + Payload: opencdc.Change{ + After: opencdc.RawData("random data"), + }, + }}, + want: []sdk.ProcessedRecord{sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.RawData(respBody), + }, + }}, + }, + { + name: "custom field for response body and status", + config: map[string]string{ + "response.body": ".Payload.After.body", + "response.status": ".Payload.After.status", + }, + args: []opencdc.Record{{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "a key": "random data", + }, + }, + }}, + want: []sdk.ProcessedRecord{sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "a key": "random data", + "body": opencdc.RawData(respBody), + "status": "200", + }, + }, + }}, + }, + { + name: "request body: custom field, structured", + config: map[string]string{ + "request.body": ".Payload.Before", + "response.body": ".Payload.After.httpResponse", + }, + args: []opencdc.Record{{ + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + 
"before-key": "before-data", + }, + After: opencdc.StructuredData{ + "after-key": "after-data", + }, + }, + }}, + want: []sdk.ProcessedRecord{sdk.SingleRecord{ + Payload: opencdc.Change{ + Before: opencdc.StructuredData{ + "before-key": "before-data", + }, + After: opencdc.StructuredData{ + "after-key": "after-data", + "httpResponse": opencdc.RawData("foo-bar/response"), + }, + }, + }}, + }, + { + name: "request body: custom field, raw data", + config: map[string]string{ + "request.body": ".Payload.Before", + "response.body": ".Payload.After.httpResponse", + }, + args: []opencdc.Record{{ + Payload: opencdc.Change{ + Before: opencdc.RawData("uncooked data"), + After: opencdc.StructuredData{ + "after-key": "after-data", + }, + }, + }}, + want: []sdk.ProcessedRecord{sdk.SingleRecord{ + Payload: opencdc.Change{ + Before: opencdc.RawData("uncooked data"), + After: opencdc.StructuredData{ + "after-key": "after-data", + "httpResponse": opencdc.RawData("foo-bar/response"), + }, + }, + }}, + }, + { + name: "request body: custom field, []byte data", + config: map[string]string{ + "request.body": ".Payload.After.contents", + "response.body": ".Payload.After.httpResponse", + }, + args: []opencdc.Record{{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "contents": []byte{15, 2, 20, 24}, + "after-key": "after-data", + }, + }, + }}, + want: []sdk.ProcessedRecord{sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.StructuredData{ + "after-key": "after-data", + "contents": []byte{15, 2, 20, 24}, + "httpResponse": opencdc.RawData("foo-bar/response"), + }, + }, + }}, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + wantMethod := tc.config["request.method"] + if wantMethod == "" { + wantMethod = "POST" // default + } + + wantBody := getRequestBody(is, tc.config["request.body"], tc.args) + + srv := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + is.Equal(wantMethod, 
req.Method) + + gotBody, err := io.ReadAll(req.Body) + is.NoErr(err) + is.Equal(wantBody, gotBody) + + _, err = resp.Write(respBody) + is.NoErr(err) + })) + defer srv.Close() + + tc.config["request.url"] = srv.URL + underTest := NewHTTPProcessor(log.Test(t)) + err := underTest.Configure(context.Background(), tc.config) + is.NoErr(err) + + got := underTest.Process(context.Background(), tc.args) + diff := cmp.Diff(tc.want, got, cmpopts.IgnoreUnexported(sdk.SingleRecord{})) + if diff != "" { + t.Logf("mismatch (-want +got): %s", diff) + t.Fail() + } + }) + } +} + +func TestHTTPProcessor_RetrySuccess(t *testing.T) { + is := is.New(t) + + respBody := []byte("foo-bar/response") + + wantMethod := "POST" + rec := []opencdc.Record{ + {Payload: opencdc.Change{After: opencdc.RawData("random data")}}, + } + wantBody := rec[0].Bytes() + + srvHandlerCount := 0 + + srv := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + srvHandlerCount++ + + is.Equal(wantMethod, req.Method) + + gotBody, err := io.ReadAll(req.Body) + is.NoErr(err) + is.Equal(wantBody, gotBody) + + if srvHandlerCount < 5 { + // first 4 requests will fail with an internal server error + resp.WriteHeader(http.StatusInternalServerError) + } else { + _, err := resp.Write(respBody) + is.NoErr(err) + } + })) + defer srv.Close() + + config := map[string]string{ + "request.url": srv.URL, + "backoffRetry.count": "4", + "backoffRetry.min": "5ms", + "backoffRetry.max": "10ms", + "backoffRetry.factor": "1.2", + } + + underTest := NewHTTPProcessor(log.Test(t)) + err := underTest.Configure(context.Background(), config) + is.NoErr(err) + + got := underTest.Process(context.Background(), rec) + is.Equal( + got, + []sdk.ProcessedRecord{sdk.SingleRecord{ + Payload: opencdc.Change{ + After: opencdc.RawData(respBody), + }, + }}, + ) + is.Equal(srvHandlerCount, 5) +} + +func TestHTTPProcessor_RetryFail(t *testing.T) { + is := is.New(t) + + srvHandlerCount := 0 + + srv := 
httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + srvHandlerCount++ + // all requests fail + resp.WriteHeader(http.StatusInternalServerError) + })) + defer srv.Close() + + config := map[string]string{ + "request.url": srv.URL, + "backoffRetry.count": "5", + "backoffRetry.min": "5ms", + "backoffRetry.max": "10ms", + "backoffRetry.factor": "1.2", + } + + underTest := NewHTTPProcessor(log.Test(t)) + err := underTest.Configure(context.Background(), config) + is.NoErr(err) + + got := underTest.Process( + context.Background(), + []opencdc.Record{{Payload: opencdc.Change{After: opencdc.RawData("something")}}}, + ) + is.Equal(1, len(got)) + _, isErr := got[0].(sdk.ErrorRecord) + is.True(isErr) // expected an error + is.Equal(srvHandlerCount, 6) // expected 6 requests (1 regular and 5 retries) +} + +func TestHTTPProcessor_FilterRecord(t *testing.T) { + is := is.New(t) + + wantMethod := "POST" + rec := []opencdc.Record{ + {Payload: opencdc.Change{After: opencdc.RawData("random data")}}, + } + + wantBody := getRequestBody(is, ".", rec) + + srv := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { + is.Equal(wantMethod, req.Method) + + gotBody, err := io.ReadAll(req.Body) + is.NoErr(err) + is.Equal(wantBody, gotBody) + + resp.WriteHeader(http.StatusNoContent) + })) + defer srv.Close() + + config := map[string]string{ + "request.url": srv.URL, + } + + underTest := NewHTTPProcessor(log.Test(t)) + err := underTest.Configure(context.Background(), config) + is.NoErr(err) + + got := underTest.Process(context.Background(), rec) + is.Equal(got, []sdk.ProcessedRecord{sdk.FilterRecord{}}) +} + +func getRequestBody(is *is.I, field string, records []opencdc.Record) []byte { + f := field + if f == "" { + f = "." 
+ } + + refRes, err := sdk.NewReferenceResolver(f) + is.NoErr(err) + + ref, err := refRes.Resolve(&records[0]) + is.NoErr(err) + + val := ref.Get() + if raw, ok := val.(opencdc.RawData); ok { + return raw.Bytes() + } + + bytes, err := json.Marshal(ref.Get()) + is.NoErr(err) + + return bytes +} diff --git a/pkg/plugin/processor/builtin/internal/diff/README.md b/pkg/plugin/processor/builtin/internal/diff/README.md new file mode 100644 index 000000000..f8e22c602 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/README.md @@ -0,0 +1,18 @@ +# Diff + +This package contains code taken from https://github.com/golang/tools/tree/master/internal/diff +on February 15th, 2024. We need the code to create a unified diff between two strings. + +The code is left as-is, except 3 changes: + +- The imports were changed to reference the Conduit module path. This was done + using the following command: + + ```sh + find . -type f -exec sed -i '' 's/golang.org\/x\/tools\/internal/github.com\/conduitio\/conduit\/pkg\/plugin\/processor\/builtin\/internal/g' {} + + ``` + +- The package `golang.org/x/tools/internal/diff/myers` was removed, as it's deprecated. + +- The package `golang.org/x/tools/internal/testenv` was added into the `diff` package, + as that's the only place it's used. It also only includes the required functions. diff --git a/pkg/plugin/processor/builtin/internal/diff/diff.go b/pkg/plugin/processor/builtin/internal/diff/diff.go new file mode 100644 index 000000000..a13547b7a --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/diff.go @@ -0,0 +1,176 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package diff computes differences between text files or strings. +package diff + +import ( + "fmt" + "sort" + "strings" +) + +// An Edit describes the replacement of a portion of a text file. 
+type Edit struct { + Start, End int // byte offsets of the region to replace + New string // the replacement +} + +func (e Edit) String() string { + return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) +} + +// Apply applies a sequence of edits to the src buffer and returns the +// result. Edits are applied in order of start offset; edits with the +// same start offset are applied in they order they were provided. +// +// Apply returns an error if any edit is out of bounds, +// or if any pair of edits is overlapping. +func Apply(src string, edits []Edit) (string, error) { + edits, size, err := validate(src, edits) + if err != nil { + return "", err + } + + // Apply edits. + out := make([]byte, 0, size) + lastEnd := 0 + for _, edit := range edits { + if lastEnd < edit.Start { + out = append(out, src[lastEnd:edit.Start]...) + } + out = append(out, edit.New...) + lastEnd = edit.End + } + out = append(out, src[lastEnd:]...) + + if len(out) != size { + panic("wrong size") + } + + return string(out), nil +} + +// ApplyBytes is like Apply, but it accepts a byte slice. +// The result is always a new array. +func ApplyBytes(src []byte, edits []Edit) ([]byte, error) { + res, err := Apply(string(src), edits) + return []byte(res), err +} + +// validate checks that edits are consistent with src, +// and returns the size of the patched output. +// It may return a different slice. +func validate(src string, edits []Edit) ([]Edit, int, error) { + if !sort.IsSorted(editsSort(edits)) { + edits = append([]Edit(nil), edits...) + SortEdits(edits) + } + + // Check validity of edits and compute final size. 
+ size := len(src) + lastEnd := 0 + for _, edit := range edits { + if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) { + return nil, 0, fmt.Errorf("diff has out-of-bounds edits") + } + if edit.Start < lastEnd { + return nil, 0, fmt.Errorf("diff has overlapping edits") + } + size += len(edit.New) + edit.Start - edit.End + lastEnd = edit.End + } + + return edits, size, nil +} + +// SortEdits orders a slice of Edits by (start, end) offset. +// This ordering puts insertions (end = start) before deletions +// (end > start) at the same point, but uses a stable sort to preserve +// the order of multiple insertions at the same point. +// (Apply detects multiple deletions at the same point as an error.) +func SortEdits(edits []Edit) { + sort.Stable(editsSort(edits)) +} + +type editsSort []Edit + +func (a editsSort) Len() int { return len(a) } +func (a editsSort) Less(i, j int) bool { + if cmp := a[i].Start - a[j].Start; cmp != 0 { + return cmp < 0 + } + return a[i].End < a[j].End +} +func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] } + +// lineEdits expands and merges a sequence of edits so that each +// resulting edit replaces one or more complete lines. +// See ApplyEdits for preconditions. +func lineEdits(src string, edits []Edit) ([]Edit, error) { + edits, _, err := validate(src, edits) + if err != nil { + return nil, err + } + + // Do all deletions begin and end at the start of a line, + // and all insertions end with a newline? + // (This is merely a fast path.) 
+ for _, edit := range edits { + if edit.Start >= len(src) || // insertion at EOF + edit.Start > 0 && src[edit.Start-1] != '\n' || // not at line start + edit.End > 0 && src[edit.End-1] != '\n' || // not at line start + edit.New != "" && edit.New[len(edit.New)-1] != '\n' { // partial insert + goto expand // slow path + } + } + return edits, nil // aligned + +expand: + if len(edits) == 0 { + return edits, nil // no edits (unreachable due to fast path) + } + expanded := make([]Edit, 0, len(edits)) // a guess + prev := edits[0] + // TODO(adonovan): opt: start from the first misaligned edit. + // TODO(adonovan): opt: avoid quadratic cost of string += string. + for _, edit := range edits[1:] { + between := src[prev.End:edit.Start] + if !strings.Contains(between, "\n") { + // overlapping lines: combine with previous edit. + prev.New += between + edit.New + prev.End = edit.End + } else { + // non-overlapping lines: flush previous edit. + expanded = append(expanded, expandEdit(prev, src)) + prev = edit + } + } + return append(expanded, expandEdit(prev, src)), nil // flush final edit +} + +// expandEdit returns edit expanded to complete whole lines. +func expandEdit(edit Edit, src string) Edit { + // Expand start left to start of line. + // (delta is the zero-based column number of start.) + start := edit.Start + if delta := start - 1 - strings.LastIndex(src[:start], "\n"); delta > 0 { + edit.Start -= delta + edit.New = src[start-delta:start] + edit.New + } + + // Expand end right to end of line. 
+ end := edit.End + if end > 0 && src[end-1] != '\n' || + edit.New != "" && edit.New[len(edit.New)-1] != '\n' { + if nl := strings.IndexByte(src[end:], '\n'); nl < 0 { + edit.End = len(src) // extend to EOF + } else { + edit.End = end + nl + 1 // extend beyond \n + } + } + edit.New += src[end:edit.End] + + return edit +} diff --git a/pkg/plugin/processor/builtin/internal/diff/diff_test.go b/pkg/plugin/processor/builtin/internal/diff/diff_test.go new file mode 100644 index 000000000..055384679 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/diff_test.go @@ -0,0 +1,207 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package diff_test + +import ( + "bytes" + "math/rand" + "os" + "os/exec" + "path/filepath" + "reflect" + "strings" + "testing" + "unicode/utf8" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/difftest" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/testenv" +) + +func TestApply(t *testing.T) { + for _, tc := range difftest.TestCases { + t.Run(tc.Name, func(t *testing.T) { + got, err := diff.Apply(tc.In, tc.Edits) + if err != nil { + t.Fatalf("Apply(Edits) failed: %v", err) + } + if got != tc.Out { + t.Errorf("Apply(Edits): got %q, want %q", got, tc.Out) + } + if tc.LineEdits != nil { + got, err := diff.Apply(tc.In, tc.LineEdits) + if err != nil { + t.Fatalf("Apply(LineEdits) failed: %v", err) + } + if got != tc.Out { + t.Errorf("Apply(LineEdits): got %q, want %q", got, tc.Out) + } + } + }) + } +} + +func TestNEdits(t *testing.T) { + for _, tc := range difftest.TestCases { + edits := diff.Strings(tc.In, tc.Out) + got, err := diff.Apply(tc.In, edits) + if err != nil { + t.Fatalf("Apply failed: %v", err) + } + if got != tc.Out { + t.Fatalf("%s: got %q wanted %q", tc.Name, got, tc.Out) + } + if 
len(edits) < len(tc.Edits) { // should find subline edits + t.Errorf("got %v, expected %v for %#v", edits, tc.Edits, tc) + } + } +} + +func TestNRandom(t *testing.T) { + rand.Seed(1) + for i := 0; i < 1000; i++ { + a := randstr("abω", 16) + b := randstr("abωc", 16) + edits := diff.Strings(a, b) + got, err := diff.Apply(a, edits) + if err != nil { + t.Fatalf("Apply failed: %v", err) + } + if got != b { + t.Fatalf("%d: got %q, wanted %q, starting with %q", i, got, b, a) + } + } +} + +// $ go test -fuzz=FuzzRoundTrip ./internal/diff +func FuzzRoundTrip(f *testing.F) { + f.Fuzz(func(t *testing.T, a, b string) { + if !utf8.ValidString(a) || !utf8.ValidString(b) { + return // inputs must be text + } + edits := diff.Strings(a, b) + got, err := diff.Apply(a, edits) + if err != nil { + t.Fatalf("Apply failed: %v", err) + } + if got != b { + t.Fatalf("applying diff(%q, %q) gives %q; edits=%v", a, b, got, edits) + } + }) +} + +func TestLineEdits(t *testing.T) { + for _, tc := range difftest.TestCases { + t.Run(tc.Name, func(t *testing.T) { + want := tc.LineEdits + if want == nil { + want = tc.Edits // already line-aligned + } + got, err := diff.LineEdits(tc.In, tc.Edits) + if err != nil { + t.Fatalf("LineEdits: %v", err) + } + if !reflect.DeepEqual(got, want) { + t.Errorf("in=<<%s>>\nout=<<%s>>\nraw edits=%s\nline edits=%s\nwant: %s", + tc.In, tc.Out, tc.Edits, got, want) + } + // make sure that applying the edits gives the expected result + fixed, err := diff.Apply(tc.In, got) + if err != nil { + t.Error(err) + } + if fixed != tc.Out { + t.Errorf("Apply(LineEdits): got %q, want %q", fixed, tc.Out) + } + }) + } +} + +func TestToUnified(t *testing.T) { + testenv.NeedsTool(t, "patch") + for _, tc := range difftest.TestCases { + t.Run(tc.Name, func(t *testing.T) { + unified, err := diff.ToUnified(difftest.FileA, difftest.FileB, tc.In, tc.Edits, diff.DefaultContextLines) + if err != nil { + t.Fatal(err) + } + if unified == "" { + return + } + orig := filepath.Join(t.TempDir(), 
"original") + err = os.WriteFile(orig, []byte(tc.In), 0644) + if err != nil { + t.Fatal(err) + } + temp := filepath.Join(t.TempDir(), "patched") + err = os.WriteFile(temp, []byte(tc.In), 0644) + if err != nil { + t.Fatal(err) + } + cmd := exec.Command("patch", "-p0", "-u", "-s", "-o", temp, orig) + cmd.Stdin = strings.NewReader(unified) + cmd.Stdout = new(bytes.Buffer) + cmd.Stderr = new(bytes.Buffer) + if err = cmd.Run(); err != nil { + t.Fatalf("%v: %q (%q) (%q)", err, cmd.String(), + cmd.Stderr, cmd.Stdout) + } + got, err := os.ReadFile(temp) + if err != nil { + t.Fatal(err) + } + if string(got) != tc.Out { + t.Errorf("applying unified failed: got\n%q, wanted\n%q unified\n%q", + got, tc.Out, unified) + } + + }) + } +} + +func TestRegressionOld001(t *testing.T) { + a := "// Copyright 2019 The Go Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style\n// license that can be found in the LICENSE file.\n\npackage diff_test\n\nimport (\n\t\"fmt\"\n\t\"math/rand\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"golang.org/x/tools/gopls/internal/lsp/diff\"\n\t\"github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/difftest\"\n\t\"golang.org/x/tools/gopls/internal/span\"\n)\n" + + b := "// Copyright 2019 The Go Authors. 
All rights reserved.\n// Use of this source code is governed by a BSD-style\n// license that can be found in the LICENSE file.\n\npackage diff_test\n\nimport (\n\t\"fmt\"\n\t\"math/rand\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"github.com/google/safehtml/template\"\n\t\"golang.org/x/tools/gopls/internal/lsp/diff\"\n\t\"github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/difftest\"\n\t\"golang.org/x/tools/gopls/internal/span\"\n)\n" + diffs := diff.Strings(a, b) + got, err := diff.Apply(a, diffs) + if err != nil { + t.Fatalf("Apply failed: %v", err) + } + if got != b { + i := 0 + for ; i < len(a) && i < len(b) && got[i] == b[i]; i++ { + } + t.Errorf("oops %vd\n%q\n%q", diffs, got, b) + t.Errorf("\n%q\n%q", got[i:], b[i:]) + } +} + +func TestRegressionOld002(t *testing.T) { + a := "n\"\n)\n" + b := "n\"\n\t\"golang.org/x//nnal/stack\"\n)\n" + diffs := diff.Strings(a, b) + got, err := diff.Apply(a, diffs) + if err != nil { + t.Fatalf("Apply failed: %v", err) + } + if got != b { + i := 0 + for ; i < len(a) && i < len(b) && got[i] == b[i]; i++ { + } + t.Errorf("oops %vd\n%q\n%q", diffs, got, b) + t.Errorf("\n%q\n%q", got[i:], b[i:]) + } +} + +// return a random string of length n made of characters from s +func randstr(s string, n int) string { + src := []rune(s) + x := make([]rune, n) + for i := 0; i < n; i++ { + x[i] = src[rand.Intn(len(src))] + } + return string(x) +} diff --git a/pkg/plugin/processor/builtin/internal/diff/difftest/difftest.go b/pkg/plugin/processor/builtin/internal/diff/difftest/difftest.go new file mode 100644 index 000000000..bdb51cfa6 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/difftest/difftest.go @@ -0,0 +1,324 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package difftest supplies a set of tests that will operate on any +// implementation of a diff algorithm as exposed by +// "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff" +package difftest + +// There are two kinds of tests, semantic tests, and 'golden data' tests. +// The semantic tests check that the computed diffs transform the input to +// the output, and that 'patch' accepts the computed unified diffs. +// The other tests just check that Edits and LineEdits haven't changed +// unexpectedly. These fields may need to be changed when the diff algorithm +// changes. + +import ( + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff" +) + +const ( + FileA = "from" + FileB = "to" + UnifiedPrefix = "--- " + FileA + "\n+++ " + FileB + "\n" +) + +var TestCases = []struct { + Name, In, Out, Unified string + Edits, LineEdits []diff.Edit // expectation (LineEdits=nil => already line-aligned) + NoDiff bool +}{{ + Name: "empty", + In: "", + Out: "", +}, { + Name: "no_diff", + In: "gargantuan\n", + Out: "gargantuan\n", +}, { + Name: "replace_all", + In: "fruit\n", + Out: "cheese\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-fruit ++cheese +`[1:], + Edits: []diff.Edit{{Start: 0, End: 5, New: "cheese"}}, + LineEdits: []diff.Edit{{Start: 0, End: 6, New: "cheese\n"}}, +}, { + Name: "insert_rune", + In: "gord\n", + Out: "gourd\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-gord ++gourd +`[1:], + Edits: []diff.Edit{{Start: 2, End: 2, New: "u"}}, + LineEdits: []diff.Edit{{Start: 0, End: 5, New: "gourd\n"}}, +}, { + Name: "delete_rune", + In: "groat\n", + Out: "goat\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-groat ++goat +`[1:], + Edits: []diff.Edit{{Start: 1, End: 2, New: ""}}, + LineEdits: []diff.Edit{{Start: 0, End: 6, New: "goat\n"}}, +}, { + Name: "replace_rune", + In: "loud\n", + Out: "lord\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-loud ++lord +`[1:], + Edits: []diff.Edit{{Start: 2, End: 3, New: 
"r"}}, + LineEdits: []diff.Edit{{Start: 0, End: 5, New: "lord\n"}}, +}, { + Name: "replace_partials", + In: "blanket\n", + Out: "bunker\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-blanket ++bunker +`[1:], + Edits: []diff.Edit{ + {Start: 1, End: 3, New: "u"}, + {Start: 6, End: 7, New: "r"}, + }, + LineEdits: []diff.Edit{{Start: 0, End: 8, New: "bunker\n"}}, +}, { + Name: "insert_line", + In: "1: one\n3: three\n", + Out: "1: one\n2: two\n3: three\n", + Unified: UnifiedPrefix + ` +@@ -1,2 +1,3 @@ + 1: one ++2: two + 3: three +`[1:], + Edits: []diff.Edit{{Start: 7, End: 7, New: "2: two\n"}}, +}, { + Name: "replace_no_newline", + In: "A", + Out: "B", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-A +\ No newline at end of file ++B +\ No newline at end of file +`[1:], + Edits: []diff.Edit{{Start: 0, End: 1, New: "B"}}, +}, { + Name: "delete_empty", + In: "meow", + Out: "", // GNU diff -u special case: +0,0 + Unified: UnifiedPrefix + ` +@@ -1 +0,0 @@ +-meow +\ No newline at end of file +`[1:], + Edits: []diff.Edit{{Start: 0, End: 4, New: ""}}, + LineEdits: []diff.Edit{{Start: 0, End: 4, New: ""}}, +}, { + Name: "append_empty", + In: "", // GNU diff -u special case: -0,0 + Out: "AB\nC", + Unified: UnifiedPrefix + ` +@@ -0,0 +1,2 @@ ++AB ++C +\ No newline at end of file +`[1:], + Edits: []diff.Edit{{Start: 0, End: 0, New: "AB\nC"}}, + LineEdits: []diff.Edit{{Start: 0, End: 0, New: "AB\nC"}}, +}, + // TODO(adonovan): fix this test: GNU diff -u prints "+1,2", Unifies prints "+1,3". 
+ // { + // Name: "add_start", + // In: "A", + // Out: "B\nCA", + // Unified: UnifiedPrefix + ` + // @@ -1 +1,2 @@ + // -A + // \ No newline at end of file + // +B + // +CA + // \ No newline at end of file + // `[1:], + // Edits: []diff.TextEdit{{Span: newSpan(0, 0), NewText: "B\nC"}}, + // LineEdits: []diff.TextEdit{{Span: newSpan(0, 0), NewText: "B\nC"}}, + // }, + { + Name: "add_end", + In: "A", + Out: "AB", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-A +\ No newline at end of file ++AB +\ No newline at end of file +`[1:], + Edits: []diff.Edit{{Start: 1, End: 1, New: "B"}}, + LineEdits: []diff.Edit{{Start: 0, End: 1, New: "AB"}}, + }, { + Name: "add_empty", + In: "", + Out: "AB\nC", + Unified: UnifiedPrefix + ` +@@ -0,0 +1,2 @@ ++AB ++C +\ No newline at end of file +`[1:], + Edits: []diff.Edit{{Start: 0, End: 0, New: "AB\nC"}}, + LineEdits: []diff.Edit{{Start: 0, End: 0, New: "AB\nC"}}, + }, { + Name: "add_newline", + In: "A", + Out: "A\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-A +\ No newline at end of file ++A +`[1:], + Edits: []diff.Edit{{Start: 1, End: 1, New: "\n"}}, + LineEdits: []diff.Edit{{Start: 0, End: 1, New: "A\n"}}, + }, { + Name: "delete_front", + In: "A\nB\nC\nA\nB\nB\nA\n", + Out: "C\nB\nA\nB\nA\nC\n", + Unified: UnifiedPrefix + ` +@@ -1,7 +1,6 @@ +-A +-B + C ++B + A + B +-B + A ++C +`[1:], + NoDiff: true, // unified diff is different but valid + Edits: []diff.Edit{ + {Start: 0, End: 4, New: ""}, + {Start: 6, End: 6, New: "B\n"}, + {Start: 10, End: 12, New: ""}, + {Start: 14, End: 14, New: "C\n"}, + }, + LineEdits: []diff.Edit{ + {Start: 0, End: 4, New: ""}, + {Start: 6, End: 6, New: "B\n"}, + {Start: 10, End: 12, New: ""}, + {Start: 14, End: 14, New: "C\n"}, + }, + }, { + Name: "replace_last_line", + In: "A\nB\n", + Out: "A\nC\n\n", + Unified: UnifiedPrefix + ` +@@ -1,2 +1,3 @@ + A +-B ++C ++ +`[1:], + Edits: []diff.Edit{{Start: 2, End: 3, New: "C\n"}}, + LineEdits: []diff.Edit{{Start: 2, End: 4, New: "C\n\n"}}, + }, + { + Name: 
"multiple_replace", + In: "A\nB\nC\nD\nE\nF\nG\n", + Out: "A\nH\nI\nJ\nE\nF\nK\n", + Unified: UnifiedPrefix + ` +@@ -1,7 +1,7 @@ + A +-B +-C +-D ++H ++I ++J + E + F +-G ++K +`[1:], + Edits: []diff.Edit{ + {Start: 2, End: 8, New: "H\nI\nJ\n"}, + {Start: 12, End: 14, New: "K\n"}, + }, + NoDiff: true, // diff algorithm produces different delete/insert pattern + }, + { + Name: "extra_newline", + In: "\nA\n", + Out: "A\n", + Edits: []diff.Edit{{Start: 0, End: 1, New: ""}}, + Unified: UnifiedPrefix + `@@ -1,2 +1 @@ +- + A +`, + }, { + Name: "unified_lines", + In: "aaa\nccc\n", + Out: "aaa\nbbb\nccc\n", + Edits: []diff.Edit{{Start: 3, End: 3, New: "\nbbb"}}, + LineEdits: []diff.Edit{{Start: 0, End: 4, New: "aaa\nbbb\n"}}, + Unified: UnifiedPrefix + "@@ -1,2 +1,3 @@\n aaa\n+bbb\n ccc\n", + }, { + Name: "60379", + In: `package a + +type S struct { +s fmt.Stringer +} +`, + Out: `package a + +type S struct { + s fmt.Stringer +} +`, + Edits: []diff.Edit{{Start: 27, End: 27, New: "\t"}}, + LineEdits: []diff.Edit{{Start: 27, End: 42, New: "\ts fmt.Stringer\n"}}, + Unified: UnifiedPrefix + "@@ -1,5 +1,5 @@\n package a\n \n type S struct {\n-s fmt.Stringer\n+\ts fmt.Stringer\n }\n", + }, +} + +func DiffTest(t *testing.T, compute func(before, after string) []diff.Edit) { + for _, test := range TestCases { + t.Run(test.Name, func(t *testing.T) { + edits := compute(test.In, test.Out) + got, err := diff.Apply(test.In, edits) + if err != nil { + t.Fatalf("Apply failed: %v", err) + } + unified, err := diff.ToUnified(FileA, FileB, test.In, edits, diff.DefaultContextLines) + if err != nil { + t.Fatalf("ToUnified: %v", err) + } + if got != test.Out { + t.Errorf("Apply: got patched:\n%v\nfrom diff:\n%v\nexpected:\n%v", + got, unified, test.Out) + } + if !test.NoDiff && unified != test.Unified { + t.Errorf("Unified: got diff:\n%q\nexpected:\n%q diffs:%v", + unified, test.Unified, edits) + } + }) + } +} diff --git a/pkg/plugin/processor/builtin/internal/diff/difftest/difftest_test.go 
b/pkg/plugin/processor/builtin/internal/diff/difftest/difftest_test.go new file mode 100644 index 000000000..5ff4aae05 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/difftest/difftest_test.go @@ -0,0 +1,82 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package difftest supplies a set of tests that will operate on any +// implementation of a diff algorithm as exposed by +// "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff" +package difftest_test + +import ( + "fmt" + "os" + "os/exec" + "strings" + "testing" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/difftest" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/testenv" +) + +func TestVerifyUnified(t *testing.T) { + testenv.NeedsTool(t, "diff") + for _, test := range difftest.TestCases { + t.Run(test.Name, func(t *testing.T) { + if test.NoDiff { + t.Skip("diff tool produces expected different results") + } + diff, err := getDiffOutput(test.In, test.Out) + if err != nil { + t.Fatal(err) + } + if len(diff) > 0 { + diff = difftest.UnifiedPrefix + diff + } + if diff != test.Unified { + t.Errorf("unified:\n%s\ndiff -u:\n%s", test.Unified, diff) + } + }) + } +} + +func getDiffOutput(a, b string) (string, error) { + fileA, err := os.CreateTemp("", "myers.in") + if err != nil { + return "", err + } + defer os.Remove(fileA.Name()) + if _, err := fileA.Write([]byte(a)); err != nil { + return "", err + } + if err := fileA.Close(); err != nil { + return "", err + } + fileB, err := os.CreateTemp("", "myers.in") + if err != nil { + return "", err + } + defer os.Remove(fileB.Name()) + if _, err := fileB.Write([]byte(b)); err != nil { + return "", err + } + if err := fileB.Close(); err != nil { + return "", err + } + cmd := exec.Command("diff", "-u", fileA.Name(), fileB.Name()) + cmd.Env = append(cmd.Env, 
"LANG=en_US.UTF-8") + out, err := cmd.CombinedOutput() + if err != nil { + if _, ok := err.(*exec.ExitError); !ok { + return "", fmt.Errorf("failed to run diff -u %v %v: %v\n%v", fileA.Name(), fileB.Name(), err, string(out)) + } + } + diff := string(out) + if len(diff) <= 0 { + return diff, nil + } + bits := strings.SplitN(diff, "\n", 3) + if len(bits) != 3 { + return "", fmt.Errorf("diff output did not have file prefix:\n%s", diff) + } + return bits[2], nil +} diff --git a/pkg/plugin/processor/builtin/internal/diff/export_test.go b/pkg/plugin/processor/builtin/internal/diff/export_test.go new file mode 100644 index 000000000..eedf0dd77 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/export_test.go @@ -0,0 +1,9 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package diff + +// This file exports some private declarations to tests. + +var LineEdits = lineEdits diff --git a/pkg/plugin/processor/builtin/internal/diff/lcs/common.go b/pkg/plugin/processor/builtin/internal/diff/lcs/common.go new file mode 100644 index 000000000..c3e82dd26 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/lcs/common.go @@ -0,0 +1,179 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package lcs + +import ( + "log" + "sort" +) + +// lcs is a longest common sequence +type lcs []diag + +// A diag is a piece of the edit graph where A[X+i] == B[Y+i], for 0<=i l[j].Len + }) + return l +} + +// validate that the elements of the lcs do not overlap +// (can only happen when the two-sided algorithm ends early) +// expects the lcs to be sorted +func (l lcs) valid() bool { + for i := 1; i < len(l); i++ { + if l[i-1].X+l[i-1].Len > l[i].X { + return false + } + if l[i-1].Y+l[i-1].Len > l[i].Y { + return false + } + } + return true +} + +// repair overlapping lcs +// only called if two-sided stops early +func (l lcs) fix() lcs { + // from the set of diagonals in l, find a maximal non-conflicting set + // this problem may be NP-complete, but we use a greedy heuristic, + // which is quadratic, but with a better data structure, could be D log D. + // indepedent is not enough: {0,3,1} and {3,0,2} can't both occur in an lcs + // which has to have monotone x and y + if len(l) == 0 { + return nil + } + sort.Slice(l, func(i, j int) bool { return l[i].Len > l[j].Len }) + tmp := make(lcs, 0, len(l)) + tmp = append(tmp, l[0]) + for i := 1; i < len(l); i++ { + var dir direction + nxt := l[i] + for _, in := range tmp { + if dir, nxt = overlap(in, nxt); dir == empty || dir == bad { + break + } + } + if nxt.Len > 0 && dir != bad { + tmp = append(tmp, nxt) + } + } + tmp.sort() + if false && !tmp.valid() { // debug checking + log.Fatalf("here %d", len(tmp)) + } + return tmp +} + +type direction int + +const ( + empty direction = iota // diag is empty (so not in lcs) + leftdown // proposed acceptably to the left and below + rightup // proposed diag is acceptably to the right and above + bad // proposed diag is inconsistent with the lcs so far +) + +// overlap trims the proposed diag prop so it doesn't overlap with +// the existing diag that has already been added to the lcs. 
+func overlap(exist, prop diag) (direction, diag) { + if prop.X <= exist.X && exist.X < prop.X+prop.Len { + // remove the end of prop where it overlaps with the X end of exist + delta := prop.X + prop.Len - exist.X + prop.Len -= delta + if prop.Len <= 0 { + return empty, prop + } + } + if exist.X <= prop.X && prop.X < exist.X+exist.Len { + // remove the beginning of prop where overlaps with exist + delta := exist.X + exist.Len - prop.X + prop.Len -= delta + if prop.Len <= 0 { + return empty, prop + } + prop.X += delta + prop.Y += delta + } + if prop.Y <= exist.Y && exist.Y < prop.Y+prop.Len { + // remove the end of prop that overlaps (in Y) with exist + delta := prop.Y + prop.Len - exist.Y + prop.Len -= delta + if prop.Len <= 0 { + return empty, prop + } + } + if exist.Y <= prop.Y && prop.Y < exist.Y+exist.Len { + // remove the beginning of peop that overlaps with exist + delta := exist.Y + exist.Len - prop.Y + prop.Len -= delta + if prop.Len <= 0 { + return empty, prop + } + prop.X += delta // no test reaches this code + prop.Y += delta + } + if prop.X+prop.Len <= exist.X && prop.Y+prop.Len <= exist.Y { + return leftdown, prop + } + if exist.X+exist.Len <= prop.X && exist.Y+exist.Len <= prop.Y { + return rightup, prop + } + // prop can't be in an lcs that contains exist + return bad, prop +} + +// manipulating Diag and lcs + +// prepend a diagonal (x,y)-(x+1,y+1) segment either to an empty lcs +// or to its first Diag. prepend is only called to extend diagonals +// the backward direction. +func (lcs lcs) prepend(x, y int) lcs { + if len(lcs) > 0 { + d := &lcs[0] + if int(d.X) == x+1 && int(d.Y) == y+1 { + // extend the diagonal down and to the left + d.X, d.Y = int(x), int(y) + d.Len++ + return lcs + } + } + + r := diag{X: int(x), Y: int(y), Len: 1} + lcs = append([]diag{r}, lcs...) + return lcs +} + +// append appends a diagonal, or extends the existing one. +// by adding the edge (x,y)-(x+1.y+1). 
// ok reports whether the pair (d, k) is a legal coordinate for the
// label tables: the path length d must be non-negative and the
// diagonal index k must lie within [-d, d].
func ok(d, k int) bool {
	if d < 0 {
		return false
	}
	return -d <= k && k <= d
}
// randstr returns a random string of length n whose characters are
// drawn uniformly, with replacement, from the characters of s.
func randstr(s string, n int) string {
	alphabet := []rune(s)
	out := make([]rune, n)
	for i := range out {
		out[i] = alphabet[rand.Intn(len(alphabet))]
	}
	return string(out)
}
4}}}, + {lcs{diag{1, 1, 6}, diag{6, 12, 3}}, lcs{diag{1, 1, 5}, diag{6, 12, 3}}}, + {lcs{diag{0, 0, 4}, diag{3, 5, 4}}, lcs{diag{0, 0, 3}, diag{3, 5, 4}}}, + {lcs{diag{0, 20, 1}, diag{0, 0, 3}, diag{1, 20, 4}}, lcs{diag{0, 0, 3}, diag{3, 22, 2}}}, + {lcs{diag{0, 0, 4}, diag{1, 1, 2}}, lcs{diag{0, 0, 4}}}, + {lcs{diag{0, 0, 4}}, lcs{diag{0, 0, 4}}}, + {lcs{}, lcs{}}, + {lcs{diag{0, 0, 4}, diag{1, 1, 6}, diag{3, 3, 2}}, lcs{diag{0, 0, 1}, diag{1, 1, 6}}}, + } + for n, x := range tests { + got := x.before.fix() + if len(got) != len(x.after) { + t.Errorf("got %v, expected %v, for %v", got, x.after, x.before) + } + olen := lcslen(x.after) + glen := lcslen(got) + if olen != glen { + t.Errorf("%d: lens(%d,%d) differ, %v, %v, %v", n, glen, olen, got, x.after, x.before) + } + } +} diff --git a/pkg/plugin/processor/builtin/internal/diff/lcs/doc.go b/pkg/plugin/processor/builtin/internal/diff/lcs/doc.go new file mode 100644 index 000000000..9029dd20b --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/lcs/doc.go @@ -0,0 +1,156 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// package lcs contains code to find longest-common-subsequences +// (and diffs) +package lcs + +/* +Compute longest-common-subsequences of two slices A, B using +algorithms from Myers' paper. A longest-common-subsequence +(LCS from now on) of A and B is a maximal set of lexically increasing +pairs of subscripts (x,y) with A[x]==B[y]. There may be many LCS, but +they all have the same length. An LCS determines a sequence of edits +that changes A into B. + +The key concept is the edit graph of A and B. +If A has length N and B has length M, then the edit graph has +vertices v[i][j] for 0 <= i <= N, 0 <= j <= M. There is a +horizontal edge from v[i][j] to v[i+1][j] whenever both are in +the graph, and a vertical edge from v[i][j] to f[i][j+1] similarly. 
+When A[i] == B[j] there is a diagonal edge from v[i][j] to v[i+1][j+1]. + +A path between in the graph between (0,0) and (N,M) determines a sequence +of edits converting A into B: each horizontal edge corresponds to removing +an element of A, and each vertical edge corresponds to inserting an +element of B. + +A vertex (x,y) is on (forward) diagonal k if x-y=k. A path in the graph +is of length D if it has D non-diagonal edges. The algorithms generate +forward paths (in which at least one of x,y increases at each edge), +or backward paths (in which at least one of x,y decreases at each edge), +or a combination. (Note that the orientation is the traditional mathematical one, +with the origin in the lower-left corner.) + +Here is the edit graph for A:"aabbaa", B:"aacaba". (I know the diagonals look weird.) + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + b | | | ___/‾‾‾ | ___/‾‾‾ | | | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + c | | | | | | | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + a a b b a a + + +The algorithm labels a vertex (x,y) with D,k if it is on diagonal k and at +the end of a maximal path of length D. (Because x-y=k it suffices to remember +only the x coordinate of the vertex.) + +The forward algorithm: Find the longest diagonal starting at (0,0) and +label its end with D=0,k=0. From that vertex take a vertical step and +then follow the longest diagonal (up and to the right), and label that vertex +with D=1,k=-1. 
From the D=0,k=0 point take a horizontal step and the follow +the longest diagonal (up and to the right) and label that vertex +D=1,k=1. In the same way, having labelled all the D vertices, +from a vertex labelled D,k find two vertices +tentatively labelled D+1,k-1 and D+1,k+1. There may be two on the same +diagonal, in which case take the one with the larger x. + +Eventually the path gets to (N,M), and the diagonals on it are the LCS. + +Here is the edit graph with the ends of D-paths labelled. (So, for instance, +0/2,2 indicates that x=2,y=2 is labelled with 0, as it should be, since the first +step is to go up the longest diagonal from (0,0).) +A:"aabbaa", B:"aacaba" + ⊙ ------- ⊙ ------- ⊙ -------(3/3,6)------- ⊙ -------(3/5,6)-------(4/6,6) + a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ | + ⊙ ------- ⊙ ------- ⊙ -------(2/3,5)------- ⊙ ------- ⊙ ------- ⊙ + b | | | ___/‾‾‾ | ___/‾‾‾ | | | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ -------(3/5,4)------- ⊙ + a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ | + ⊙ ------- ⊙ -------(1/2,3)-------(2/3,3)------- ⊙ ------- ⊙ ------- ⊙ + c | | | | | | | + ⊙ ------- ⊙ -------(0/2,2)-------(1/3,2)-------(2/4,2)-------(3/5,2)-------(4/6,2) + a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + a | ___/‾‾‾ | ___/‾‾‾ | | | ___/‾‾‾ | ___/‾‾‾ | + ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ ------- ⊙ + a a b b a a + +The 4-path is reconstructed starting at (4/6,6), horizontal to (3/5,6), diagonal to (3,4), vertical +to (2/3,3), horizontal to (1/2,3), vertical to (0/2,2), and diagonal to (0,0). As expected, +there are 4 non-diagonal steps, and the diagonals form an LCS. 
+ +There is a symmetric backward algorithm, which gives (backwards labels are prefixed with a colon): +A:"aabbaa", B:"aacaba" + ⊙ -------- ⊙ -------- ⊙ -------- ⊙ -------- ⊙ -------- ⊙ -------- ⊙ + a | ____/‾‾‾ | ____/‾‾‾ | | | ____/‾‾‾ | ____/‾‾‾ | + ⊙ -------- ⊙ -------- ⊙ -------- ⊙ -------- ⊙ --------(:0/5,5)-------- ⊙ + b | | | ____/‾‾‾ | ____/‾‾‾ | | | + ⊙ -------- ⊙ -------- ⊙ --------(:1/3,4)-------- ⊙ -------- ⊙ -------- ⊙ + a | ____/‾‾‾ | ____/‾‾‾ | | | ____/‾‾‾ | ____/‾‾‾ | + (:3/0,3)--------(:2/1,3)-------- ⊙ --------(:2/3,3)--------(:1/4,3)-------- ⊙ -------- ⊙ + c | | | | | | | + ⊙ -------- ⊙ -------- ⊙ --------(:3/3,2)--------(:2/4,2)-------- ⊙ -------- ⊙ + a | ____/‾‾‾ | ____/‾‾‾ | | | ____/‾‾‾ | ____/‾‾‾ | + (:3/0,1)-------- ⊙ -------- ⊙ -------- ⊙ --------(:3/4,1)-------- ⊙ -------- ⊙ + a | ____/‾‾‾ | ____/‾‾‾ | | | ____/‾‾‾ | ____/‾‾‾ | + (:4/0,0)-------- ⊙ -------- ⊙ -------- ⊙ --------(:4/4,0)-------- ⊙ -------- ⊙ + a a b b a a + +Neither of these is ideal for use in an editor, where it is undesirable to send very long diffs to the +front end. It's tricky to decide exactly what 'very long diffs' means, as "replace A by B" is very short. +We want to control how big D can be, by stopping when it gets too large. The forward algorithm then +privileges common prefixes, and the backward algorithm privileges common suffixes. Either is an undesirable +asymmetry. + +Fortunately there is a two-sided algorithm, implied by results in Myers' paper. Here's what the labels in +the edit graph look like. 
+A:"aabbaa", B:"aacaba" + ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ + a | ____/‾‾‾‾ | ____/‾‾‾‾ | | | ____/‾‾‾‾ | ____/‾‾‾‾ | + ⊙ --------- ⊙ --------- ⊙ --------- (2/3,5) --------- ⊙ --------- (:0/5,5)--------- ⊙ + b | | | ____/‾‾‾‾ | ____/‾‾‾‾ | | | + ⊙ --------- ⊙ --------- ⊙ --------- (:1/3,4)--------- ⊙ --------- ⊙ --------- ⊙ + a | ____/‾‾‾‾ | ____/‾‾‾‾ | | | ____/‾‾‾‾ | ____/‾‾‾‾ | + ⊙ --------- (:2/1,3)--------- (1/2,3) ---------(2:2/3,3)--------- (:1/4,3)--------- ⊙ --------- ⊙ + c | | | | | | | + ⊙ --------- ⊙ --------- (0/2,2) --------- (1/3,2) ---------(2:2/4,2)--------- ⊙ --------- ⊙ + a | ____/‾‾‾‾ | ____/‾‾‾‾ | | | ____/‾‾‾‾ | ____/‾‾‾‾ | + ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ + a | ____/‾‾‾‾ | ____/‾‾‾‾ | | | ____/‾‾‾‾ | ____/‾‾‾‾ | + ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ --------- ⊙ + a a b b a a + +The algorithm stopped when it saw the backwards 2-path ending at (1,3) and the forwards 2-path ending at (3,5). The criterion +is a backwards path ending at (u,v) and a forward path ending at (x,y), where u <= x and the two points are on the same +diagonal. (Here the edgegraph has a diagonal, but the criterion is x-y=u-v.) Myers proves there is a forward +2-path from (0,0) to (1,3), and that together with the backwards 2-path ending at (1,3) gives the expected 4-path. +Unfortunately the forward path has to be constructed by another run of the forward algorithm; it can't be found from the +computed labels. That is the worst case. Had the code noticed (x,y)=(u,v)=(3,3) the whole path could be reconstructed +from the edgegraph. The implementation looks for a number of special cases to try to avoid computing an extra forward path. + +If the two-sided algorithm has stop early (because D has become too large) it will have found a forward LCS and a +backwards LCS. 
Ideally these go with disjoint prefixes and suffixes of A and B, but disjointness may fail and the two +computed LCS may conflict. (An easy example is where A is a suffix of B, and shares a short prefix. The backwards LCS +is all of A, and the forward LCS is a prefix of A.) The algorithm combines the two +to form a best-effort LCS. In the worst case the forward partial LCS may have to +be recomputed. +*/ + +/* Eugene Myers paper is titled +"An O(ND) Difference Algorithm and Its Variations" +and can be found at +http://www.xmailserver.org/diff2.pdf + +(There is a generic implementation of the algorithm the repository with git hash +b9ad7e4ade3a686d608e44475390ad428e60e7fc) +*/ diff --git a/pkg/plugin/processor/builtin/internal/diff/lcs/git.sh b/pkg/plugin/processor/builtin/internal/diff/lcs/git.sh new file mode 100644 index 000000000..b25ba4aac --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/lcs/git.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# +# Copyright 2022 The Go Authors. All rights reserved. +# Use of this source code is governed by a BSD-style +# license that can be found in the LICENSE file. +# +# Creates a zip file containing all numbered versions +# of the commit history of a large source file, for use +# as input data for the tests of the diff algorithm. +# +# Run script from root of the x/tools repo. + +set -eu + +# WARNING: This script will install the latest version of $file +# The largest real source file in the x/tools repo. 
// checkDK validates a (D, k) pair used to index the label tables:
// the diagonal k must lie in [-D, D] and D+k must be even (a D-path
// can only end on diagonals of the same parity as D). It panics on
// violation; it is only invoked when the debug flag is enabled.
func checkDK(D, k int) {
	valid := -D <= k && k <= D && (D+k)%2 == 0
	if valid {
		return
	}
	panic(fmt.Sprintf("out of range, d=%d,k=%d", D, k))
}
// diff computes the differences between two abstract sequences using
// the two-sided algorithm with a fixed depth limit, so pathological
// inputs still terminate quickly (possibly with a suboptimal, but
// still correct, set of edits).
func diff(seqs sequences) []Diff {
	// A limit on how deeply the LCS algorithm should search. The value is just a guess.
	const maxDiffs = 100
	// NOTE(review): presumably each direction of the two-sided search
	// gets half the budget — confirm against compute/twosided.
	diff, _ := compute(seqs, twosided, maxDiffs/2)
	return diff
}
+func compute(seqs sequences, algo func(*editGraph) lcs, limit int) ([]Diff, lcs) { + if limit <= 0 { + limit = 1 << 25 // effectively infinity + } + alen, blen := seqs.lengths() + g := &editGraph{ + seqs: seqs, + vf: newtriang(limit), + vb: newtriang(limit), + limit: limit, + ux: alen, + uy: blen, + delta: alen - blen, + } + lcs := algo(g) + diffs := lcs.toDiffs(alen, blen) + return diffs, lcs +} + +// editGraph carries the information for computing the lcs of two sequences. +type editGraph struct { + seqs sequences + vf, vb label // forward and backward labels + + limit int // maximal value of D + // the bounding rectangle of the current edit graph + lx, ly, ux, uy int + delta int // common subexpression: (ux-lx)-(uy-ly) +} + +// toDiffs converts an LCS to a list of edits. +func (lcs lcs) toDiffs(alen, blen int) []Diff { + var diffs []Diff + var pa, pb int // offsets in a, b + for _, l := range lcs { + if pa < l.X || pb < l.Y { + diffs = append(diffs, Diff{pa, l.X, pb, l.Y}) + } + pa = l.X + l.Len + pb = l.Y + l.Len + } + if pa < alen || pb < blen { + diffs = append(diffs, Diff{pa, alen, pb, blen}) + } + return diffs +} + +// --- FORWARD --- + +// fdone decides if the forwward path has reached the upper right +// corner of the rectangle. If so, it also returns the computed lcs. +func (e *editGraph) fdone(D, k int) (bool, lcs) { + // x, y, k are relative to the rectangle + x := e.vf.get(D, k) + y := x - k + if x == e.ux && y == e.uy { + return true, e.forwardlcs(D, k) + } + return false, nil +} + +// run the forward algorithm, until success or up to the limit on D. 
// recover the lcs by backtracking from the farthest point reached,
// labelled (D, k): walk predecessors back to the origin, and every
// diagonal edge crossed on the way becomes part of the returned lcs.
func (e *editGraph) forwardlcs(D, k int) lcs {
	var ans lcs
	// x is the current (rectangle-relative) column; y = x-k; loop until (0,0)
	for x := e.getForward(D, k); x != 0 || x-k != 0; {
		if ok(D-1, k-1) && x-1 == e.getForward(D-1, k-1) {
			// if (x-1,y) is labelled D-1, x--,D--,k--,continue
			D, k, x = D-1, k-1, x-1
			continue
		} else if ok(D-1, k+1) && x == e.getForward(D-1, k+1) {
			// if (x,y-1) is labelled D-1, x, D--,k++, continue
			D, k = D-1, k+1
			continue
		}
		// if (x-1,y-1)--(x,y) is a diagonal, prepend,x--,y--, continue
		y := x - k
		// convert rectangle-relative coordinates back to absolute ones
		ans = ans.prepend(x+e.lx-1, y+e.ly-1)
		x--
	}
	return ans
}
// run the backward algorithm, until success or up to the limit on D.
func backward(e *editGraph) lcs {
	// label the starting (upper-right) corner and handle the trivial case
	e.setBackward(0, 0, e.ux)
	if ok, ans := e.bdone(0, 0); ok {
		return ans
	}
	// from D to D+1
	for D := 0; D < e.limit; D++ {
		// the extreme diagonals -(D+1) and D+1 each have only one predecessor
		e.setBackward(D+1, -(D + 1), e.getBackward(D, -D)-1)
		if ok, ans := e.bdone(D+1, -(D + 1)); ok {
			return ans
		}
		e.setBackward(D+1, D+1, e.getBackward(D, D))
		if ok, ans := e.bdone(D+1, D+1); ok {
			return ans
		}
		for k := -D + 1; k <= D-1; k += 2 {
			// these are tricky and easy to get wrong
			// (interior diagonals: keep the smaller x, the path that
			// reaches farther back toward the origin)
			lookv := e.lookBackward(k, e.getBackward(D, k-1))
			lookh := e.lookBackward(k, e.getBackward(D, k+1)-1)
			if lookv < lookh {
				e.setBackward(D+1, k, lookv)
			} else {
				e.setBackward(D+1, k, lookh)
			}
			if ok, ans := e.bdone(D+1, k); ok {
				return ans
			}
		}
	}

	// D is too large
	// find the D path with minimal x+y inside the rectangle and
	// use that to compute the part of the lcs found
	kmax := -e.limit - 1
	diagmin := 1 << 25
	for k := -e.limit; k <= e.limit; k += 2 {
		x := e.getBackward(e.limit, k)
		y := x - (k + e.delta)
		if x+y < diagmin && x >= 0 && y >= 0 {
			diagmin, kmax = x+y, k
		}
	}
	if kmax < -e.limit {
		panic(fmt.Sprintf("no paths when limit=%d?", e.limit))
	}
	return e.backwardlcs(e.limit, kmax)
}
// twosided runs the forward and backward searches in lockstep until
// Myers' Lemma certifies they have met (twoDone); if the depth limit
// is hit first, it glues together the best forward and backward
// partial LCSs and repairs any conflict with fix.
func twosided(e *editGraph) lcs {
	// The termination condition could be improved, as either the forward
	// or backward pass could succeed before Myers' Lemma applies.
	// Aside from questions of efficiency (is the extra testing cost-effective)
	// this is more likely to matter when e.limit is reached.
	e.setForward(0, 0, e.lx)
	e.setBackward(0, 0, e.ux)

	// from D to D+1
	for D := 0; D < e.limit; D++ {
		// just finished a backwards pass, so check
		if got, ok := e.twoDone(D, D); ok {
			return e.twolcs(D, D, got)
		}
		// do a forwards pass (D to D+1)
		e.setForward(D+1, -(D + 1), e.getForward(D, -D))
		e.setForward(D+1, D+1, e.getForward(D, D)+1)
		for k := -D + 1; k <= D-1; k += 2 {
			// these are tricky and easy to get backwards
			lookv := e.lookForward(k, e.getForward(D, k-1)+1)
			lookh := e.lookForward(k, e.getForward(D, k+1))
			if lookv > lookh {
				e.setForward(D+1, k, lookv)
			} else {
				e.setForward(D+1, k, lookh)
			}
		}
		// just did a forward pass, so check
		if got, ok := e.twoDone(D+1, D); ok {
			return e.twolcs(D+1, D, got)
		}
		// do a backward pass, D to D+1
		e.setBackward(D+1, -(D + 1), e.getBackward(D, -D)-1)
		e.setBackward(D+1, D+1, e.getBackward(D, D))
		for k := -D + 1; k <= D-1; k += 2 {
			// these are tricky and easy to get wrong
			lookv := e.lookBackward(k, e.getBackward(D, k-1))
			lookh := e.lookBackward(k, e.getBackward(D, k+1)-1)
			if lookv < lookh {
				e.setBackward(D+1, k, lookv)
			} else {
				e.setBackward(D+1, k, lookh)
			}
		}
	}

	// D too large. combine a forward and backward partial lcs
	// first, a forward one
	kmax := -e.limit - 1
	diagmax := -1
	for k := -e.limit; k <= e.limit; k += 2 {
		x := e.getForward(e.limit, k)
		y := x - k
		if x+y > diagmax && x <= e.ux && y <= e.uy {
			diagmax, kmax = x+y, k
		}
	}
	if kmax < -e.limit {
		panic(fmt.Sprintf("no forward paths when limit=%d?", e.limit))
	}
	lcs := e.forwardlcs(e.limit, kmax)
	// now a backward one
	// find the D path with minimal x+y inside the rectangle and
	// use that to compute the lcs
	diagmin := 1 << 25 // infinity
	for k := -e.limit; k <= e.limit; k += 2 {
		x := e.getBackward(e.limit, k)
		y := x - (k + e.delta)
		if x+y < diagmin && x >= 0 && y >= 0 {
			diagmin, kmax = x+y, k
		}
	}
	if kmax < -e.limit {
		panic(fmt.Sprintf("no backward paths when limit=%d?", e.limit))
	}
	lcs = append(lcs, e.backwardlcs(e.limit, kmax)...)
	// These may overlap (e.forwardlcs and e.backwardlcs return sorted lcs)
	ans := lcs.fix()
	return ans
}
+ // In the first case the overall path is the forward path + // to (u,v) followed by the backward path to (N,M). + // In the second case the path is the backward path to (x,y) + // followed by the forward path to (x,y) from (0,0). + + // Look for some special cases to avoid computing either of these paths. + if x == u { + // "babaab" "cccaba" + // already patched together + lcs := e.forwardlcs(df, kf) + lcs = append(lcs, e.backwardlcs(db, kb)...) + return lcs.sort() + } + + // is (u-1,v) or (u,v-1) labelled df-1? + // if so, that forward df-1-path plus a horizontal or vertical edge + // is the df-path to (u,v), then plus the db-path to (N,M) + if u > 0 && ok(df-1, u-1-v) && e.vf.get(df-1, u-1-v) == u-1 { + // "aabbab" "cbcabc" + lcs := e.forwardlcs(df-1, u-1-v) + lcs = append(lcs, e.backwardlcs(db, kb)...) + return lcs.sort() + } + if v > 0 && ok(df-1, (u-(v-1))) && e.vf.get(df-1, u-(v-1)) == u { + // "abaabb" "bcacab" + lcs := e.forwardlcs(df-1, u-(v-1)) + lcs = append(lcs, e.backwardlcs(db, kb)...) + return lcs.sort() + } + + // The path can't possibly contribute to the lcs because it + // is all horizontal or vertical edges + if u == 0 || v == 0 || x == e.ux || y == e.uy { + // "abaabb" "abaaaa" + if u == 0 || v == 0 { + return e.backwardlcs(db, kb) + } + return e.forwardlcs(df, kf) + } + + // is (x+1,y) or (x,y+1) labelled db-1? + if x+1 <= e.ux && ok(db-1, x+1-y-e.delta) && e.vb.get(db-1, x+1-y-e.delta) == x+1 { + // "bababb" "baaabb" + lcs := e.backwardlcs(db-1, kb+1) + lcs = append(lcs, e.forwardlcs(df, kf)...) + return lcs.sort() + } + if y+1 <= e.uy && ok(db-1, x-(y+1)-e.delta) && e.vb.get(db-1, x-(y+1)-e.delta) == x { + // "abbbaa" "cabacc" + lcs := e.backwardlcs(db-1, kb-1) + lcs = append(lcs, e.forwardlcs(df, kf)...) + return lcs.sort() + } + + // need to compute another path + // "aabbaa" "aacaba" + lcs := e.backwardlcs(db, kb) + oldx, oldy := e.ux, e.uy + e.ux = u + e.uy = v + lcs = append(lcs, forward(e)...) 
// TestAlgosOld runs every entry of the shared Btests table through all
// three algorithms (forward, backward, twosided), in both argument
// orders, checking the resulting LCS and the derived diffs.
func TestAlgosOld(t *testing.T) {
	for i, algo := range []func(*editGraph) lcs{forward, backward, twosided} {
		t.Run(strings.Fields("forward backward twosided")[i], func(t *testing.T) {
			for _, tx := range Btests {
				// a limit large enough that the search is never cut short
				lim := len(tx.a) + len(tx.b)

				diffs, lcs := compute(stringSeqs{tx.a, tx.b}, algo, lim)
				check(t, tx.a, lcs, tx.lcs)
				checkDiffs(t, tx.a, diffs, tx.b)

				// swapped arguments must yield the same common subsequence
				diffs, lcs = compute(stringSeqs{tx.b, tx.a}, algo, lim)
				check(t, tx.b, lcs, tx.lcs)
				checkDiffs(t, tx.b, diffs, tx.a)
			}
		})
	}
}
// exercises lcs.fix + a := "golang.org/x/tools/intern" + b := "github.com/google/safehtml/template\"\n\t\"golang.org/x/tools/intern" + diffs, lcs := compute(stringSeqs{a, b}, twosided, 4) + if !lcs.valid() { + t.Errorf("%d,%v", len(diffs), lcs) + } +} + +func TestRegressionOld001(t *testing.T) { + a := "// Copyright 2019 The Go Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style\n// license that can be found in the LICENSE file.\n\npackage diff_test\n\nimport (\n\t\"fmt\"\n\t\"math/rand\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"golang.org/x/tools/gopls/internal/lsp/diff\"\n\t\"github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/difftest\"\n\t\"golang.org/x/tools/gopls/internal/span\"\n)\n" + + b := "// Copyright 2019 The Go Authors. All rights reserved.\n// Use of this source code is governed by a BSD-style\n// license that can be found in the LICENSE file.\n\npackage diff_test\n\nimport (\n\t\"fmt\"\n\t\"math/rand\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"github.com/google/safehtml/template\"\n\t\"golang.org/x/tools/gopls/internal/lsp/diff\"\n\t\"github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/difftest\"\n\t\"golang.org/x/tools/gopls/internal/span\"\n)\n" + for i := 1; i < len(b); i++ { + diffs, lcs := compute(stringSeqs{a, b}, twosided, i) // 14 from gopls + if !lcs.valid() { + t.Errorf("%d,%v", len(diffs), lcs) + } + checkDiffs(t, a, diffs, b) + } +} + +func TestRegressionOld002(t *testing.T) { + a := "n\"\n)\n" + b := "n\"\n\t\"golang.org/x//nnal/stack\"\n)\n" + for i := 1; i <= len(b); i++ { + diffs, lcs := compute(stringSeqs{a, b}, twosided, i) + if !lcs.valid() { + t.Errorf("%d,%v", len(diffs), lcs) + } + checkDiffs(t, a, diffs, b) + } +} + +func TestRegressionOld003(t *testing.T) { + a := "golang.org/x/hello v1.0.0\nrequire golang.org/x/unused v1" + b := "golang.org/x/hello v1" + for i := 1; i <= len(a); i++ { + diffs, lcs := compute(stringSeqs{a, b}, twosided, i) + if !lcs.valid() { + 
 t.Errorf("%d,%v", len(diffs), lcs) + } + checkDiffs(t, a, diffs, b) + } +} + +func TestRandOld(t *testing.T) { + rand.Seed(1) + for i := 0; i < 1000; i++ { + // TODO(adonovan): use ASCII and bytesSeqs here? The use of + // non-ASCII isn't relevant to the property exercised by the test. + a := []rune(randstr("abω", 16)) + b := []rune(randstr("abωc", 16)) + seq := runesSeqs{a, b} + + const lim = 24 // large enough to get true lcs + _, forw := compute(seq, forward, lim) + _, back := compute(seq, backward, lim) + _, two := compute(seq, twosided, lim) + if lcslen(two) != lcslen(forw) || lcslen(forw) != lcslen(back) { + t.Logf("\n%v\n%v\n%v", forw, back, two) + t.Fatalf("%d forw:%d back:%d two:%d", i, lcslen(forw), lcslen(back), lcslen(two)) + } + if !two.valid() || !forw.valid() || !back.valid() { + t.Errorf("check failure") + } + } +} + +// TestDiffAPI tests the public API functions (Diff{Bytes,Strings,Runes}) +// to ensure at least minimal parity of the three representations. +func TestDiffAPI(t *testing.T) { + for _, test := range []struct { + a, b string + wantStrings, wantBytes, wantRunes string + }{ + {"abcXdef", "abcxdef", "[{3 4 3 4}]", "[{3 4 3 4}]", "[{3 4 3 4}]"}, // ASCII + {"abcωdef", "abcΩdef", "[{3 5 3 5}]", "[{3 5 3 5}]", "[{3 4 3 4}]"}, // non-ASCII + } { + + gotStrings := fmt.Sprint(DiffStrings(test.a, test.b)) + if gotStrings != test.wantStrings { + t.Errorf("DiffStrings(%q, %q) = %v, want %v", + test.a, test.b, gotStrings, test.wantStrings) + } + gotBytes := fmt.Sprint(DiffBytes([]byte(test.a), []byte(test.b))) + if gotBytes != test.wantBytes { + t.Errorf("DiffBytes(%q, %q) = %v, want %v", + test.a, test.b, gotBytes, test.wantBytes) + } + gotRunes := fmt.Sprint(DiffRunes([]rune(test.a), []rune(test.b))) + if gotRunes != test.wantRunes { + t.Errorf("DiffRunes(%q, %q) = %v, want %v", + test.a, test.b, gotRunes, test.wantRunes) + } + } +} + +func BenchmarkTwoOld(b *testing.B) { + tests := genBench("abc", 96) + for i := 0; i < b.N; i++ { + for _, tt := 
range tests { + _, two := compute(stringSeqs{tt.before, tt.after}, twosided, 100) + if !two.valid() { + b.Error("check failed") + } + } + } +} + +func BenchmarkForwOld(b *testing.B) { + tests := genBench("abc", 96) + for i := 0; i < b.N; i++ { + for _, tt := range tests { + _, two := compute(stringSeqs{tt.before, tt.after}, forward, 100) + if !two.valid() { + b.Error("check failed") + } + } + } +} + +func genBench(set string, n int) []struct{ before, after string } { + // before and after for benchmarks. 24 strings of length n with + // before and after differing at least once, and about 5% + rand.Seed(3) + var ans []struct{ before, after string } + for i := 0; i < 24; i++ { + // maybe b should have an approximately known number of diffs + a := randstr(set, n) + cnt := 0 + bb := make([]rune, 0, n) + for _, r := range a { + if rand.Float64() < .05 { + cnt++ + r = 'N' + } + bb = append(bb, r) + } + if cnt == 0 { + // avoid == shortcut + bb[n/2] = 'N' + } + ans = append(ans, struct{ before, after string }{a, string(bb)}) + } + return ans +} + +// This benchmark represents a common case for a diff command: +// large file with a single relatively small diff in the middle. +// (It's not clear whether this is representative of gopls workloads +// or whether it is important to gopls diff performance.) +// +// TODO(adonovan) opt: it could be much faster. For example, +// comparing a file against itself is about 10x faster than with the +// small deletion in the middle. Strangely, comparing a file against +// itself minus the last byte is faster still; I don't know why. +// There is much low-hanging fruit here for further improvement. 
+func BenchmarkLargeFileSmallDiff(b *testing.B) { + data, err := os.ReadFile("old.go") // large file + if err != nil { + log.Fatal(err) + } + + n := len(data) + + src := string(data) + dst := src[:n*49/100] + src[n*51/100:] // remove 2% from the middle + b.Run("string", func(b *testing.B) { + for i := 0; i < b.N; i++ { + compute(stringSeqs{src, dst}, twosided, len(src)+len(dst)) + } + }) + + srcBytes := []byte(src) + dstBytes := []byte(dst) + b.Run("bytes", func(b *testing.B) { + for i := 0; i < b.N; i++ { + compute(bytesSeqs{srcBytes, dstBytes}, twosided, len(srcBytes)+len(dstBytes)) + } + }) + + srcRunes := []rune(src) + dstRunes := []rune(dst) + b.Run("runes", func(b *testing.B) { + for i := 0; i < b.N; i++ { + compute(runesSeqs{srcRunes, dstRunes}, twosided, len(srcRunes)+len(dstRunes)) + } + }) +} diff --git a/pkg/plugin/processor/builtin/internal/diff/lcs/sequence.go b/pkg/plugin/processor/builtin/internal/diff/lcs/sequence.go new file mode 100644 index 000000000..2d72d2630 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/lcs/sequence.go @@ -0,0 +1,113 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lcs + +// This file defines the abstract sequence over which the LCS algorithm operates. + +// sequences abstracts a pair of sequences, A and B. 
+type sequences interface { + lengths() (int, int) // len(A), len(B) + commonPrefixLen(ai, aj, bi, bj int) int // len(commonPrefix(A[ai:aj], B[bi:bj])) + commonSuffixLen(ai, aj, bi, bj int) int // len(commonSuffix(A[ai:aj], B[bi:bj])) +} + +type stringSeqs struct{ a, b string } + +func (s stringSeqs) lengths() (int, int) { return len(s.a), len(s.b) } +func (s stringSeqs) commonPrefixLen(ai, aj, bi, bj int) int { + return commonPrefixLenString(s.a[ai:aj], s.b[bi:bj]) +} +func (s stringSeqs) commonSuffixLen(ai, aj, bi, bj int) int { + return commonSuffixLenString(s.a[ai:aj], s.b[bi:bj]) +} + +// The explicit capacity in s[i:j:j] leads to more efficient code. + +type bytesSeqs struct{ a, b []byte } + +func (s bytesSeqs) lengths() (int, int) { return len(s.a), len(s.b) } +func (s bytesSeqs) commonPrefixLen(ai, aj, bi, bj int) int { + return commonPrefixLenBytes(s.a[ai:aj:aj], s.b[bi:bj:bj]) +} +func (s bytesSeqs) commonSuffixLen(ai, aj, bi, bj int) int { + return commonSuffixLenBytes(s.a[ai:aj:aj], s.b[bi:bj:bj]) +} + +type runesSeqs struct{ a, b []rune } + +func (s runesSeqs) lengths() (int, int) { return len(s.a), len(s.b) } +func (s runesSeqs) commonPrefixLen(ai, aj, bi, bj int) int { + return commonPrefixLenRunes(s.a[ai:aj:aj], s.b[bi:bj:bj]) +} +func (s runesSeqs) commonSuffixLen(ai, aj, bi, bj int) int { + return commonSuffixLenRunes(s.a[ai:aj:aj], s.b[bi:bj:bj]) +} + +// TODO(adonovan): optimize these functions using ideas from: +// - https://go.dev/cl/408116 common.go +// - https://go.dev/cl/421435 xor_generic.go + +// TODO(adonovan): factor using generics when available, +// but measure performance impact. + +// commonPrefixLen* returns the length of the common prefix of a[ai:aj] and b[bi:bj]. 
+func commonPrefixLenBytes(a, b []byte) int { + n := min(len(a), len(b)) + i := 0 + for i < n && a[i] == b[i] { + i++ + } + return i +} +func commonPrefixLenRunes(a, b []rune) int { + n := min(len(a), len(b)) + i := 0 + for i < n && a[i] == b[i] { + i++ + } + return i +} +func commonPrefixLenString(a, b string) int { + n := min(len(a), len(b)) + i := 0 + for i < n && a[i] == b[i] { + i++ + } + return i +} + +// commonSuffixLen* returns the length of the common suffix of a[ai:aj] and b[bi:bj]. +func commonSuffixLenBytes(a, b []byte) int { + n := min(len(a), len(b)) + i := 0 + for i < n && a[len(a)-1-i] == b[len(b)-1-i] { + i++ + } + return i +} +func commonSuffixLenRunes(a, b []rune) int { + n := min(len(a), len(b)) + i := 0 + for i < n && a[len(a)-1-i] == b[len(b)-1-i] { + i++ + } + return i +} +func commonSuffixLenString(a, b string) int { + n := min(len(a), len(b)) + i := 0 + for i < n && a[len(a)-1-i] == b[len(b)-1-i] { + i++ + } + return i +} + +func min(x, y int) int { + if x < y { + return x + } else { + return y + } +} diff --git a/pkg/plugin/processor/builtin/internal/diff/ndiff.go b/pkg/plugin/processor/builtin/internal/diff/ndiff.go new file mode 100644 index 000000000..65b3fdb2f --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/ndiff.go @@ -0,0 +1,99 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package diff + +import ( + "bytes" + "unicode/utf8" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff/lcs" +) + +// Strings computes the differences between two strings. +// The resulting edits respect rune boundaries. +func Strings(before, after string) []Edit { + if before == after { + return nil // common case + } + + if isASCII(before) && isASCII(after) { + // TODO(adonovan): opt: specialize diffASCII for strings. 
+ return diffASCII([]byte(before), []byte(after)) + } + return diffRunes([]rune(before), []rune(after)) +} + +// Bytes computes the differences between two byte slices. +// The resulting edits respect rune boundaries. +func Bytes(before, after []byte) []Edit { + if bytes.Equal(before, after) { + return nil // common case + } + + if isASCII(before) && isASCII(after) { + return diffASCII(before, after) + } + return diffRunes(runes(before), runes(after)) +} + +func diffASCII(before, after []byte) []Edit { + diffs := lcs.DiffBytes(before, after) + + // Convert from LCS diffs. + res := make([]Edit, len(diffs)) + for i, d := range diffs { + res[i] = Edit{d.Start, d.End, string(after[d.ReplStart:d.ReplEnd])} + } + return res +} + +func diffRunes(before, after []rune) []Edit { + diffs := lcs.DiffRunes(before, after) + + // The diffs returned by the lcs package use indexes + // into whatever slice was passed in. + // Convert rune offsets to byte offsets. + res := make([]Edit, len(diffs)) + lastEnd := 0 + utf8Len := 0 + for i, d := range diffs { + utf8Len += runesLen(before[lastEnd:d.Start]) // text between edits + start := utf8Len + utf8Len += runesLen(before[d.Start:d.End]) // text deleted by this edit + res[i] = Edit{start, utf8Len, string(after[d.ReplStart:d.ReplEnd])} + lastEnd = d.End + } + return res +} + +// runes is like []rune(string(bytes)) without the duplicate allocation. +func runes(bytes []byte) []rune { + n := utf8.RuneCount(bytes) + runes := make([]rune, n) + for i := 0; i < n; i++ { + r, sz := utf8.DecodeRune(bytes) + bytes = bytes[sz:] + runes[i] = r + } + return runes +} + +// runesLen returns the length in bytes of the UTF-8 encoding of runes. +func runesLen(runes []rune) (len int) { + for _, r := range runes { + len += utf8.RuneLen(r) + } + return len +} + +// isASCII reports whether s contains only ASCII. 
+func isASCII[S string | []byte](s S) bool { + for i := 0; i < len(s); i++ { + if s[i] >= utf8.RuneSelf { + return false + } + } + return true +} diff --git a/pkg/plugin/processor/builtin/internal/diff/testenv/testenv.go b/pkg/plugin/processor/builtin/internal/diff/testenv/testenv.go new file mode 100644 index 000000000..d142e9356 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/testenv/testenv.go @@ -0,0 +1,199 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testenv + +import ( + "bytes" + "fmt" + "os" + "os/exec" + "path/filepath" + "runtime" + "runtime/debug" + "strings" + "sync" + "testing" +) + +// packageMainIsDevel reports whether the module containing package main +// is a development version (if module information is available). +func packageMainIsDevel() bool { + info, ok := debug.ReadBuildInfo() + if !ok { + // Most test binaries currently lack build info, but this should become more + // permissive once https://golang.org/issue/33976 is fixed. + return true + } + + // Note: info.Main.Version describes the version of the module containing + // package main, not the version of “the main module”. + // See https://golang.org/issue/33975. 
+ return info.Main.Version == "(devel)" +} + +var checkGoBuild struct { + once sync.Once + err error +} + +func hasTool(tool string) error { + if tool == "cgo" { + enabled, err := cgoEnabled(false) + if err != nil { + return fmt.Errorf("checking cgo: %v", err) + } + if !enabled { + return fmt.Errorf("cgo not enabled") + } + return nil + } + + _, err := exec.LookPath(tool) + if err != nil { + return err + } + + switch tool { + case "patch": + // check that the patch tool supports the -o argument + temp, err := os.CreateTemp("", "patch-test") + if err != nil { + return err + } + temp.Close() + defer os.Remove(temp.Name()) + cmd := exec.Command(tool, "-o", temp.Name()) + if err := cmd.Run(); err != nil { + return err + } + + case "go": + checkGoBuild.once.Do(func() { + if runtime.GOROOT() != "" { + // Ensure that the 'go' command found by exec.LookPath is from the correct + // GOROOT. Otherwise, 'some/path/go test ./...' will test against some + // version of the 'go' binary other than 'some/path/go', which is almost + // certainly not what the user intended. 
+ out, err := exec.Command(tool, "env", "GOROOT").CombinedOutput() + if err != nil { + checkGoBuild.err = err + return + } + GOROOT := strings.TrimSpace(string(out)) + if GOROOT != runtime.GOROOT() { + checkGoBuild.err = fmt.Errorf("'go env GOROOT' does not match runtime.GOROOT:\n\tgo env: %s\n\tGOROOT: %s", GOROOT, runtime.GOROOT()) + return + } + } + + dir, err := os.MkdirTemp("", "testenv-*") + if err != nil { + checkGoBuild.err = err + return + } + defer os.RemoveAll(dir) + + mainGo := filepath.Join(dir, "main.go") + if err := os.WriteFile(mainGo, []byte("package main\nfunc main() {}\n"), 0644); err != nil { + checkGoBuild.err = err + return + } + cmd := exec.Command("go", "build", "-o", os.DevNull, mainGo) + cmd.Dir = dir + if out, err := cmd.CombinedOutput(); err != nil { + if len(out) > 0 { + checkGoBuild.err = fmt.Errorf("%v: %v\n%s", cmd, err, out) + } else { + checkGoBuild.err = fmt.Errorf("%v: %v", cmd, err) + } + } + }) + if checkGoBuild.err != nil { + return checkGoBuild.err + } + + case "diff": + // Check that diff is the GNU version, needed for the -u argument and + // to report missing newlines at the end of files. + out, err := exec.Command(tool, "-version").Output() + if err != nil { + return err + } + if !bytes.Contains(out, []byte("GNU diffutils")) { + return fmt.Errorf("diff is not the GNU version") + } + } + + return nil +} + +func cgoEnabled(bypassEnvironment bool) (bool, error) { + cmd := exec.Command("go", "env", "CGO_ENABLED") + if bypassEnvironment { + cmd.Env = append(append([]string(nil), os.Environ()...), "CGO_ENABLED=") + } + out, err := cmd.CombinedOutput() + if err != nil { + return false, err + } + enabled := strings.TrimSpace(string(out)) + return enabled == "1", nil +} + +func allowMissingTool(tool string) bool { + switch runtime.GOOS { + case "aix", "darwin", "dragonfly", "freebsd", "illumos", "linux", "netbsd", "openbsd", "plan9", "solaris", "windows": + // Known non-mobile OS. Expect a reasonably complete environment. 
+ default: + return true + } + + switch tool { + case "cgo": + if strings.HasSuffix(os.Getenv("GO_BUILDER_NAME"), "-nocgo") { + // Explicitly disabled on -nocgo builders. + return true + } + if enabled, err := cgoEnabled(true); err == nil && !enabled { + // No platform support. + return true + } + case "go": + if os.Getenv("GO_BUILDER_NAME") == "illumos-amd64-joyent" { + // Work around a misconfigured builder (see https://golang.org/issue/33950). + return true + } + case "diff": + if os.Getenv("GO_BUILDER_NAME") != "" { + return true + } + case "patch": + if os.Getenv("GO_BUILDER_NAME") != "" { + return true + } + } + + // If a developer is actively working on this test, we expect them to have all + // of its dependencies installed. However, if it's just a dependency of some + // other module (for example, being run via 'go test all'), we should be more + // tolerant of unusual environments. + return !packageMainIsDevel() +} + +// NeedsTool skips t if the named tool is not present in the path. +// As a special case, "cgo" means "go" is present and can compile cgo programs. +func NeedsTool(t testing.TB, tool string) { + err := hasTool(tool) + if err == nil { + return + } + + t.Helper() + if allowMissingTool(tool) { + t.Skipf("skipping because %s tool not available: %v", tool, err) + } else { + t.Fatalf("%s tool not available: %v", tool, err) + } +} diff --git a/pkg/plugin/processor/builtin/internal/diff/unified.go b/pkg/plugin/processor/builtin/internal/diff/unified.go new file mode 100644 index 000000000..cfbda6102 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/diff/unified.go @@ -0,0 +1,251 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package diff + +import ( + "fmt" + "log" + "strings" +) + +// DefaultContextLines is the number of unchanged lines of surrounding +// context displayed by Unified. 
Use ToUnified to specify a different value. +const DefaultContextLines = 3 + +// Unified returns a unified diff of the old and new strings. +// The old and new labels are the names of the old and new files. +// If the strings are equal, it returns the empty string. +func Unified(oldLabel, newLabel, old, new string) string { + edits := Strings(old, new) + unified, err := ToUnified(oldLabel, newLabel, old, edits, DefaultContextLines) + if err != nil { + // Can't happen: edits are consistent. + log.Fatalf("internal error in diff.Unified: %v", err) + } + return unified +} + +// ToUnified applies the edits to content and returns a unified diff, +// with contextLines lines of (unchanged) context around each diff hunk. +// The old and new labels are the names of the content and result files. +// It returns an error if the edits are inconsistent; see ApplyEdits. +func ToUnified(oldLabel, newLabel, content string, edits []Edit, contextLines int) (string, error) { + u, err := toUnified(oldLabel, newLabel, content, edits, contextLines) + if err != nil { + return "", err + } + return u.String(), nil +} + +// unified represents a set of edits as a unified diff. +type unified struct { + // from is the name of the original file. + from string + // to is the name of the modified file. + to string + // hunks is the set of edit hunks needed to transform the file content. + hunks []*hunk +} + +// Hunk represents a contiguous set of line edits to apply. +type hunk struct { + // The line in the original source where the hunk starts. + fromLine int + // The line in the original source where the hunk finishes. + toLine int + // The set of line based edits to apply. + lines []line +} + +// Line represents a single line operation to apply as part of a Hunk. +type line struct { + // kind is the type of line this represents, deletion, insertion or copy. + kind opKind + // content is the content of this line. 
+ // For deletion it is the line being removed, for all others it is the line + // to put in the output. + content string +} + +// opKind is used to denote the type of operation a line represents. +type opKind int + +const ( + // opDelete is the operation kind for a line that is present in the input + // but not in the output. + opDelete opKind = iota + // opInsert is the operation kind for a line that is new in the output. + opInsert + // opEqual is the operation kind for a line that is the same in the input and + // output, often used to provide context around edited lines. + opEqual +) + +// String returns a human readable representation of an OpKind. It is not +// intended for machine processing. +func (k opKind) String() string { + switch k { + case opDelete: + return "delete" + case opInsert: + return "insert" + case opEqual: + return "equal" + default: + panic("unknown operation kind") + } +} + +// toUnified takes a file contents and a sequence of edits, and calculates +// a unified diff that represents those edits. +func toUnified(fromName, toName string, content string, edits []Edit, contextLines int) (unified, error) { + gap := contextLines * 2 + u := unified{ + from: fromName, + to: toName, + } + if len(edits) == 0 { + return u, nil + } + var err error + edits, err = lineEdits(content, edits) // expand to whole lines + if err != nil { + return u, err + } + lines := splitLines(content) + var h *hunk + last := 0 + toLine := 0 + for _, edit := range edits { + // Compute the zero-based line numbers of the edit start and end. + // TODO(adonovan): opt: compute incrementally, avoid O(n^2). 
+ start := strings.Count(content[:edit.Start], "\n") + end := strings.Count(content[:edit.End], "\n") + if edit.End == len(content) && len(content) > 0 && content[len(content)-1] != '\n' { + end++ // EOF counts as an implicit newline + } + + switch { + case h != nil && start == last: + //direct extension + case h != nil && start <= last+gap: + //within range of previous lines, add the joiners + addEqualLines(h, lines, last, start) + default: + //need to start a new hunk + if h != nil { + // add the edge to the previous hunk + addEqualLines(h, lines, last, last+contextLines) + u.hunks = append(u.hunks, h) + } + toLine += start - last + h = &hunk{ + fromLine: start + 1, + toLine: toLine + 1, + } + // add the edge to the new hunk + delta := addEqualLines(h, lines, start-contextLines, start) + h.fromLine -= delta + h.toLine -= delta + } + last = start + for i := start; i < end; i++ { + h.lines = append(h.lines, line{kind: opDelete, content: lines[i]}) + last++ + } + if edit.New != "" { + for _, content := range splitLines(edit.New) { + h.lines = append(h.lines, line{kind: opInsert, content: content}) + toLine++ + } + } + } + if h != nil { + // add the edge to the final hunk + addEqualLines(h, lines, last, last+contextLines) + u.hunks = append(u.hunks, h) + } + return u, nil +} + +func splitLines(text string) []string { + lines := strings.SplitAfter(text, "\n") + if lines[len(lines)-1] == "" { + lines = lines[:len(lines)-1] + } + return lines +} + +func addEqualLines(h *hunk, lines []string, start, end int) int { + delta := 0 + for i := start; i < end; i++ { + if i < 0 { + continue + } + if i >= len(lines) { + return delta + } + h.lines = append(h.lines, line{kind: opEqual, content: lines[i]}) + delta++ + } + return delta +} + +// String converts a unified diff to the standard textual form for that diff. +// The output of this function can be passed to tools like patch. 
+func (u unified) String() string { + if len(u.hunks) == 0 { + return "" + } + b := new(strings.Builder) + fmt.Fprintf(b, "--- %s\n", u.from) + fmt.Fprintf(b, "+++ %s\n", u.to) + for _, hunk := range u.hunks { + fromCount, toCount := 0, 0 + for _, l := range hunk.lines { + switch l.kind { + case opDelete: + fromCount++ + case opInsert: + toCount++ + default: + fromCount++ + toCount++ + } + } + fmt.Fprint(b, "@@") + if fromCount > 1 { + fmt.Fprintf(b, " -%d,%d", hunk.fromLine, fromCount) + } else if hunk.fromLine == 1 && fromCount == 0 { + // Match odd GNU diff -u behavior adding to empty file. + fmt.Fprintf(b, " -0,0") + } else { + fmt.Fprintf(b, " -%d", hunk.fromLine) + } + if toCount > 1 { + fmt.Fprintf(b, " +%d,%d", hunk.toLine, toCount) + } else if hunk.toLine == 1 && toCount == 0 { + // Match odd GNU diff -u behavior adding to empty file. + fmt.Fprintf(b, " +0,0") + } else { + fmt.Fprintf(b, " +%d", hunk.toLine) + } + fmt.Fprint(b, " @@\n") + for _, l := range hunk.lines { + switch l.kind { + case opDelete: + fmt.Fprintf(b, "-%s", l.content) + case opInsert: + fmt.Fprintf(b, "+%s", l.content) + default: + fmt.Fprintf(b, " %s", l.content) + } + if !strings.HasSuffix(l.content, "\n") { + fmt.Fprintf(b, "\n\\ No newline at end of file\n") + } + } + } + return b.String() +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/example.go b/pkg/plugin/processor/builtin/internal/exampleutil/example.go new file mode 100644 index 000000000..8211078bb --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/example.go @@ -0,0 +1,131 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package exampleutil + +import ( + "bytes" + "context" + "fmt" + "log" + + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/internal/diff" + "github.com/goccy/go-json" + "github.com/google/go-cmp/cmp" +) + +// -- HELPERS ------------------------------------------------------------------ + +var processors = map[string]*procInfo{} + +type procInfo struct { + Specification sdk.Specification `json:"specification"` + Examples []Example `json:"examples"` +} + +type Example struct { + // Order is an optional field that is used to order examples in the + // documentation. If omitted, the example will be ordered by description. + Order int `json:"-"` + Summary string `json:"summary"` + Description string `json:"description"` + Config map[string]string `json:"config"` + Have opencdc.Record `json:"have"` + Want sdk.ProcessedRecord `json:"want"` +} + +// RunExample runs the given example with the given processor and logs the +// result. It is intended to be used in example functions. Additionally, it +// stores the processor specification and example in a global map so it can be +// used to generate documentation. 
+func RunExample(p sdk.Processor, e Example) { + spec, err := p.Specification() + if err != nil { + log.Fatalf("failed to fetch specification: %v", err) + } + + pi, ok := processors[spec.Name] + if !ok { + pi = &procInfo{Specification: spec} + processors[spec.Name] = pi + } + + ctx := context.Background() + err = p.Configure(ctx, e.Config) + if err != nil { + log.Fatalf("failed to configure processor: %v", err) + } + + err = p.Open(ctx) + if err != nil { + log.Fatalf("failed to open processor: %v", err) + } + + got := p.Process(ctx, []opencdc.Record{e.Have.Clone()}) + if len(got) != 1 { + log.Fatalf("expected 1 record to be returned, got %d", len(got)) + } + + if d := cmp.Diff(e.Want, got[0], internal.CmpProcessedRecordOpts...); d != "" { + log.Fatalf("processed record did not match expectation:\n%v", d) + } + + switch rec := got[0].(type) { + case sdk.SingleRecord: + // Serialize records to pretty JSON for comparison. + havePrettyJSON, err := recordToPrettyJSON(e.Have) + if err != nil { + log.Fatalf("failed to marshal test record to pretty JSON: %v", err) + } + gotPrettyJSON, err := recordToPrettyJSON(opencdc.Record(rec)) + if err != nil { + log.Fatalf("failed to marshal processed record to pretty JSON: %v", err) + } + + edits := diff.Strings(string(havePrettyJSON), string(gotPrettyJSON)) + unified, err := diff.ToUnified("before", "after", string(havePrettyJSON)+"\n", edits, 100) + if err != nil { + log.Fatalf("failed to produce unified diff: %v", err) + } + + fmt.Printf("processor transformed record:\n%s\n", unified) + case sdk.FilterRecord: + fmt.Println("processor filtered record out") + case sdk.ErrorRecord: + fmt.Printf("processor returned error: %s\n", rec.Error) + } + + // append example to processor + pi.Examples = append(pi.Examples, e) +} + +func recordToPrettyJSON(r opencdc.Record) ([]byte, error) { + serializer := opencdc.JSONSerializer{RawDataAsString: true} + + // Serialize records to pretty JSON for comparison. 
+ haveJSON, err := serializer.Serialize(r) + if err != nil { + return nil, fmt.Errorf("failed to marshal test record to JSON: %w", err) + } + var buf bytes.Buffer + err = json.Indent(&buf, haveJSON, "", " ") + if err != nil { + return nil, fmt.Errorf("failed to indent test record JSON: %w", err) + } + return buf.Bytes(), nil +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/export.go b/pkg/plugin/processor/builtin/internal/exampleutil/export.go new file mode 100644 index 000000000..b2c8ff00c --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/export.go @@ -0,0 +1,90 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package exampleutil + +import ( + "context" + "log" + "os" + "path/filepath" + "runtime" + "sort" + "strings" + + "github.com/conduitio/conduit/pkg/foundation/cerrors" + + "github.com/conduitio/conduit-commons/opencdc" + "github.com/goccy/go-json" +) + +// ExportProcessors exports the processors to the specs directory. 
+func ExportProcessors() { + _, filename, _, _ := runtime.Caller(0) //nolint:dogsled // this is the idiomatic way to get the current file's path + dir := filepath.Join(filepath.Dir(filename), "specs") + + for _, processor := range sortProcessors(processors) { + path := filepath.Join(dir, processor.Specification.Name+".json") + err := exportProcessor(path, processor) + if err != nil { + log.Fatalf("error: %v", err) + } + } +} + +func exportProcessor(path string, processor *procInfo) error { + output, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0600) + if err != nil { + return cerrors.Errorf("failed to open %s: %w", path, err) + } + defer output.Close() + + ctx := opencdc.WithJSONMarshalOptions(context.Background(), &opencdc.JSONMarshalOptions{RawDataAsString: true}) + enc := json.NewEncoder(output) + enc.SetIndent("", " ") + + err = enc.EncodeContext(ctx, processor) + if err != nil { + return cerrors.Errorf("failed to write processors to output: %w", err) + } + return nil +} + +func sortProcessors(processors map[string]*procInfo) []*procInfo { + names := make([]string, 0, len(processors)) + for name := range processors { + names = append(names, name) + } + sort.Strings(names) + + sorted := make([]*procInfo, len(names)) + for i, name := range names { + // also sort examples for each processor + proc := processors[name] + proc.Examples = sortExamples(proc.Examples) + sorted[i] = proc + } + + return sorted +} + +func sortExamples(examples []Example) []Example { + sort.Slice(examples, func(i, j int) bool { + if examples[i].Order != examples[j].Order { + return examples[i].Order < examples[j].Order + } + return strings.Compare(examples[i].Description, examples[j].Description) < 0 + }) + return examples +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/avro.decode.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/avro.decode.json new file mode 100644 index 000000000..5602f502f --- /dev/null +++ 
b/pkg/plugin/processor/builtin/internal/exampleutil/specs/avro.decode.json @@ -0,0 +1,100 @@ +{ + "specification": { + "name": "avro.decode", + "summary": "Decodes a field's raw data in the Avro format", + "description": "The processor takes raw data (bytes or a string) in the specified field and decodes\nit from the [Avro format](https://avro.apache.org/) into structured data. It extracts the schema ID from the data,\ndownloads the associated schema from the [schema registry](https://docs.confluent.io/platform/current/schema-registry/index.html)\nand decodes the payload. The schema is cached locally after it's first downloaded.\n\nIf the processor encounters structured data or the data can't be decoded it returns an error.\n\nThis processor is the counterpart to [`avro.encode`](/docs/processors/builtin/avro.encode).", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "auth.basic.password": { + "default": "", + "description": "The password to use with basic authentication. This option is required if\nauth.basic.username contains a value. If both auth.basic.username and auth.basic.password\nare empty basic authentication is disabled.", + "type": "string", + "validations": [] + }, + "auth.basic.username": { + "default": "", + "description": "The username to use with basic authentication. This option is required if\nauth.basic.password contains a value. If both auth.basic.username and auth.basic.password\nare empty basic authentication is disabled.", + "type": "string", + "validations": [] + }, + "field": { + "default": ".Payload.After", + "description": "The field that will be encoded.", + "type": "string", + "validations": [] + }, + "tls.ca.cert": { + "default": "", + "description": "The path to a file containing PEM encoded CA certificates. 
If this option is empty,\nConduit falls back to using the host's root CA set.", + "type": "string", + "validations": [] + }, + "tls.client.cert": { + "default": "", + "description": "The path to a file containing a PEM encoded certificate. This option is required\nif tls.client.key contains a value. If both tls.client.cert and tls.client.key are empty\nTLS is disabled.", + "type": "string", + "validations": [] + }, + "tls.client.key": { + "default": "", + "description": "The path to a file containing a PEM encoded private key. This option is required\nif tls.client.cert contains a value. If both tls.client.cert and tls.client.key are empty\nTLS is disabled.", + "type": "string", + "validations": [] + }, + "url": { + "default": "", + "description": "URL of the schema registry (e.g. http://localhost:8085)", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + } + ] + } + } + }, + "examples": [ + { + "summary": "Decode a record field in Avro format", + "description": "This example shows the usage of the `avro.decode` processor.\nThe processor decodes the record's`.Key` field using the schema that is\ndownloaded from the schema registry and needs to exist under the subject`example-decode`.\nIn this example we use the following schema:\n\n```json\n{\n \"type\":\"record\",\n \"name\":\"record\",\n \"fields\":[\n {\"name\":\"myString\",\"type\":\"string\"},\n {\"name\":\"myInt\",\"type\":\"int\"}\n ]\n}\n```", + "config": { + "auth.basic.password": "", + "auth.basic.username": "", + "field": ".Key", + "tls.ca.cert": "", + "tls.client.cert": "", + "tls.client.key": "", + "url": "http://127.0.0.1:54322" + }, + "have": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": "\u0000\u0000\u0000\u0000\u0001\u0006bar\u0002", + "payload": { + "before": null, + "after": null + } + }, + "want": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + 
"key": { + "myInt": 1, + "myString": "bar" + }, + "payload": { + "before": null, + "after": null + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/avro.encode.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/avro.encode.json new file mode 100644 index 000000000..cf2b4aed1 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/avro.encode.json @@ -0,0 +1,195 @@ +{ + "specification": { + "name": "avro.encode", + "summary": "Encodes a record's field into the Avro format", + "description": "The processor takes a record's field and encodes it using a schema into the [Avro format](https://avro.apache.org/).\nIt provides two strategies for determining the schema:\n\n* **preRegistered** (recommended)\n This strategy downloads an existing schema from the schema registry and uses it to encode the record.\n This requires the schema to already be registered in the schema registry. The schema is downloaded\n only once and cached locally.\n* **autoRegister** (for development purposes)\n This strategy infers the schema by inspecting the structured data and registers it in the schema\n registry. If the record schema is known in advance it's recommended to use the preRegistered strategy\n and manually register the schema, as this strategy comes with limitations.\n\n The strategy uses reflection to traverse the structured data of each record and determine the type\n of each field. If a specific field is set to nil the processor won't have enough information to determine\n the type and will default to a nullable string. Because of this it is not guaranteed that two records\n with the same structure produce the same schema or even a backwards compatible schema. The processor\n registers each inferred schema in the schema registry with the same subject, therefore the schema compatibility\n checks need to be disabled for this schema to prevent failures. 
If the schema subject does not exist before running\n this processor, it will automatically set the correct compatibility settings in the schema registry.\n\nThis processor is the counterpart to [`avro.decode`](/docs/processors/builtin/avro.decode).", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "auth.basic.password": { + "default": "", + "description": "The password to use with basic authentication. This option is required if\nauth.basic.username contains a value. If both auth.basic.username and auth.basic.password\nare empty basic authentication is disabled.", + "type": "string", + "validations": [] + }, + "auth.basic.username": { + "default": "", + "description": "The username to use with basic authentication. This option is required if\nauth.basic.password contains a value. If both auth.basic.username and auth.basic.password\nare empty basic authentication is disabled.", + "type": "string", + "validations": [] + }, + "field": { + "default": ".Payload.After", + "description": "The field that will be encoded.", + "type": "string", + "validations": [] + }, + "schema.autoRegister.subject": { + "default": "", + "description": "The subject name under which the inferred schema will be registered in the schema registry.", + "type": "string", + "validations": [] + }, + "schema.preRegistered.subject": { + "default": "", + "description": "The subject of the schema in the schema registry used to encode the record.", + "type": "string", + "validations": [] + }, + "schema.preRegistered.version": { + "default": "", + "description": "The version of the schema in the schema registry used to encode the record.", + "type": "int", + "validations": [ + { + "type": "greater-than", + "value": "0" + } + ] + }, + "schema.strategy": { + "default": "", + "description": "Strategy to use to determine the schema for the record.\nAvailable strategies are:\n* `preRegistered` (recommended) - Download an existing schema from the schema registry.\n This strategy is 
further configured with options starting with `schema.preRegistered.*`.\n* `autoRegister` (for development purposes) - Infer the schema from the record and register it\n in the schema registry. This strategy is further configured with options starting with\n `schema.autoRegister.*`.\n\nFor more information about the behavior of each strategy read the main processor description.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + }, + { + "type": "inclusion", + "value": "preRegistered,autoRegister" + } + ] + }, + "tls.ca.cert": { + "default": "", + "description": "The path to a file containing PEM encoded CA certificates. If this option is empty,\nConduit falls back to using the host's root CA set.", + "type": "string", + "validations": [] + }, + "tls.client.cert": { + "default": "", + "description": "The path to a file containing a PEM encoded certificate. This option is required\nif tls.client.key contains a value. If both tls.client.cert and tls.client.key are empty\nTLS is disabled.", + "type": "string", + "validations": [] + }, + "tls.client.key": { + "default": "", + "description": "The path to a file containing a PEM encoded private key. This option is required\nif tls.client.cert contains a value. If both tls.client.cert and tls.client.key are empty\nTLS is disabled.", + "type": "string", + "validations": [] + }, + "url": { + "default": "", + "description": "URL of the schema registry (e.g. http://localhost:8085)", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + } + ] + } + } + }, + "examples": [ + { + "summary": "Auto-register schema", + "description": "This example shows the usage of the `avro.encode` processor\nwith the `autoRegister` schema strategy. 
The processor encodes the record's\n`.Payload.After` field using the schema that is extracted from the data\nand registered on the fly under the subject `example-autoRegister`.", + "config": { + "auth.basic.password": "", + "auth.basic.username": "", + "field": ".Payload.After", + "schema.autoRegister.subject": "example-autoRegister", + "schema.preRegistered.subject": "", + "schema.preRegistered.version": "", + "schema.strategy": "autoRegister", + "tls.ca.cert": "", + "tls.client.cert": "", + "tls.client.key": "", + "url": "http://127.0.0.1:54322" + }, + "have": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": null, + "payload": { + "before": null, + "after": { + "myFloat": 2.3, + "myInt": 1, + "myMap": { + "bar": 2.2, + "foo": true + }, + "myString": "bar", + "myStruct": { + "bar": false, + "foo": 1 + } + } + } + }, + "want": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": null, + "payload": { + "before": null, + "after": "\u0000\u0000\u0000\u0000\u0001ffffff\u0002@\u0002\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd\u0001@\u0001\u0006bar\u0000\u0002" + } + } + }, + { + "summary": "Pre-register schema", + "description": "This example shows the usage of the `avro.encode` processor\nwith the `preRegistered` schema strategy. When using this strategy, the\nschema has to be manually pre-registered. 
In this example we use the following schema:\n\n```json\n{\n \"type\":\"record\",\n \"name\":\"record\",\n \"fields\":[\n {\"name\":\"myString\",\"type\":\"string\"},\n {\"name\":\"myInt\",\"type\":\"int\"}\n ]\n}\n```\n\nThe processor encodes the record's`.Key` field using the above schema.", + "config": { + "auth.basic.password": "", + "auth.basic.username": "", + "field": ".Key", + "schema.autoRegister.subject": "", + "schema.preRegistered.subject": "example-preRegistered", + "schema.preRegistered.version": "1", + "schema.strategy": "preRegistered", + "tls.ca.cert": "", + "tls.client.cert": "", + "tls.client.key": "", + "url": "http://127.0.0.1:54322" + }, + "have": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": { + "myInt": 1, + "myString": "bar" + }, + "payload": { + "before": null, + "after": null + } + }, + "want": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": "\u0000\u0000\u0000\u0000\u0001\u0006bar\u0002", + "payload": { + "before": null, + "after": null + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/base64.decode.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/base64.decode.json new file mode 100644 index 000000000..772282b12 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/base64.decode.json @@ -0,0 +1,63 @@ +{ + "specification": { + "name": "base64.decode", + "summary": "Decode a field to base64.", + "description": "The processor will decode the value of the target field from base64 and store the\nresult in the target field. It is not allowed to decode the `.Position` field.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": "", + "description": "Field is the target field, as it would be addressed in a Go template (e.g. 
`.Payload.After.foo`).\nNote that it is not allowed to base64 decode the `.Position` field.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + }, + { + "type": "exclusion", + "value": ".Position" + } + ] + } + } + }, + "examples": [ + { + "summary": "Decode a base64 encoded string", + "description": "This example decodes the base64 encoded string stored in\n`.Payload.After`. Note that the result is a string, so if you want to\nfurther process the result (e.g. parse the string as JSON), you need to chain\nother processors (e.g. [`json.decode`](/docs/processors/builtin/json.decode)).", + "config": { + "field": ".Payload.After.foo" + }, + "have": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": "test-key", + "payload": { + "before": null, + "after": { + "foo": "YmFy" + } + } + }, + "want": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": "test-key", + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/base64.encode.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/base64.encode.json new file mode 100644 index 000000000..5bc0428a8 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/base64.encode.json @@ -0,0 +1,98 @@ +{ + "specification": { + "name": "base64.encode", + "summary": "Encode a field to base64.", + "description": "The processor will encode the value of the target field to base64 and store the\nresult in the target field. It is not allowed to encode the `.Position` field.\nIf the provided field doesn't exist, the processor will create that field and\nassign its value.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": "", + "description": "Field is the target field, as it would be addressed in a Go template (e.g. 
`.Payload.After.foo`).\nNote that it is not allowed to base64 encode the `.Position` field.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + }, + { + "type": "exclusion", + "value": ".Position" + } + ] + } + } + }, + "examples": [ + { + "summary": "Encode record key to base64", + "description": "This example takes a record containing raw data in\n.Key and converts it into a base64 encoded string.", + "config": { + "field": ".Key" + }, + "have": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": "test-key", + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + }, + "want": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": "dGVzdC1rZXk=", + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + } + }, + { + "summary": "Encode nested value to base64", + "description": "This example takes a record containing a string in\n.Payload.After.foo and converts it into a base64 encoded string.", + "config": { + "field": ".Payload.After.foo" + }, + "have": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": "test-key", + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + }, + "want": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": "test-key", + "payload": { + "before": null, + "after": { + "foo": "YmFy" + } + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/custom.javascript.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/custom.javascript.json new file mode 100644 index 000000000..07f721361 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/custom.javascript.json @@ -0,0 +1,58 @@ +{ + "specification": { + "name": "custom.javascript", + "summary": "JavaScript processor", + 
"description": "A processor that makes it possible to process Conduit records using JavaScript.\n\nThe following helper functions and fields are available:\n* logger: a logger that outputs to Conduit's logs. Check zerolog's API on how to use it.\n* Record(): constructs a new record which represents a successful processing result.\nIt's analogous to sdk.SingleRecord from Conduit's Go processor SDK.\n* RawData(): creates a raw data object. It's analogous to opencdc.RawData. Optionally, it\naccepts a string argument, which will be cast into a byte array, for example: record.Key = RawData(\"new key\").\n* StructuredData(): creates a structured data (map-like) object.\n\nTo find out what's possible with the JS processors, also refer to the documentation for \n[goja](https://github.com/dop251/goja), which is the JavaScript engine we use.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "script": { + "default": "", + "description": "JavaScript code for this processor.\nIt needs to have a function `process()` that accept\na record and returns a record.\nThe `process()` function can either modify the input record and return it,\nor create a new record.\nIf a record needs to be filtered (dropped from the pipeline),\nthen the `process()` function should return a `null`.", + "type": "string", + "validations": [] + }, + "script.path": { + "default": "", + "description": "The path to a .js file containing the processor code.", + "type": "string", + "validations": [] + } + } + }, + "examples": [ + { + "summary": "Modify a record's metadata and payload using JavaScript", + "description": "In this example we use the `custom.javascript` processor to add a metadata key to the input record. 
It also prepends \"hello, \" to `.Payload.After`.", + "config": { + "script": "function process(rec) {\n rec.Metadata[\"processed\"] = \"true\";\n let existing = String.fromCharCode.apply(String, rec.Payload.After);\n rec.Payload.After = RawData(\"hello, \" + existing);\n return rec;\n}", + "script.path": "" + }, + "have": { + "position": null, + "operation": "Operation(0)", + "metadata": { + "existing-key": "existing-value" + }, + "key": null, + "payload": { + "before": null, + "after": "world" + } + }, + "want": { + "position": null, + "operation": "Operation(0)", + "metadata": { + "existing-key": "existing-value", + "processed": "true" + }, + "key": null, + "payload": { + "before": null, + "after": "hello, world" + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.convert.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.convert.json new file mode 100644 index 000000000..b878c2cdd --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.convert.json @@ -0,0 +1,151 @@ +{ + "specification": { + "name": "field.convert", + "summary": "Convert the type of a field.", + "description": "Convert takes the field of one type and converts it into another type (e.g. string to integer). \nThe applicable types are string, int, float and bool. Converting can be done between any combination of types. Note that\nbooleans will be converted to numeric values 1 (true) and 0 (false). Processor is only applicable to .Key, .Payload.Before\nand .Payload.After prefixes, and only applicable if said fields contain structured data.\nIf the record contains raw JSON data, then use the processor [json.decode](/docs/processors/builtin/json.decode)\nto parse it into structured data first.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": "", + "description": "Field is the target field, as it would be addressed in a Go template (e.g. 
`.Payload.After.foo`).\nyou can only convert fields that are under .Key and .Payload, and said fields should contain structured data.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + }, + { + "type": "regex", + "value": "^\\.(Payload|Key).*" + } + ] + }, + "type": { + "default": "", + "description": "Type is the target field type after conversion, available options are: string, int, float, bool.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + }, + { + "type": "inclusion", + "value": "string,int,float,bool" + } + ] + } + } + }, + "examples": [ + { + "summary": "Convert `float` to `string`", + "description": "This example takes the `float` in field `.Key.id` and changes its data type to `string`.", + "config": { + "field": ".Key.id", + "type": "string" + }, + "have": { + "position": null, + "operation": "update", + "metadata": null, + "key": { + "id": 123.345 + }, + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + }, + "want": { + "position": null, + "operation": "update", + "metadata": null, + "key": { + "id": "123.345" + }, + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + } + }, + { + "summary": "Convert `int` to `bool`", + "description": "This example takes the `int` in field `.Payload.After.done` and changes its data type to `bool`.", + "config": { + "field": ".Payload.After.done", + "type": "bool" + }, + "have": { + "position": null, + "operation": "update", + "metadata": null, + "key": { + "id": "123" + }, + "payload": { + "before": null, + "after": { + "done": 1 + } + } + }, + "want": { + "position": null, + "operation": "update", + "metadata": null, + "key": { + "id": "123" + }, + "payload": { + "before": null, + "after": { + "done": true + } + } + } + }, + { + "summary": "Convert `string` to `int`", + "description": "This example takes the string in field `.Key.id` and changes its data type to `int`.", + "config": { + "field": ".Key.id", + "type": 
"int" + }, + "have": { + "position": null, + "operation": "update", + "metadata": null, + "key": { + "id": "123" + }, + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + }, + "want": { + "position": null, + "operation": "update", + "metadata": null, + "key": { + "id": 123 + }, + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.exclude.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.exclude.json new file mode 100644 index 000000000..f47f383bf --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.exclude.json @@ -0,0 +1,102 @@ +{ + "specification": { + "name": "field.exclude", + "summary": "Remove a subset of fields from the record.", + "description": "Remove a subset of fields from the record, all the other fields are left untouched.\nIf a field is excluded that contains nested data, the whole tree will be removed.\nIt is not allowed to exclude .Position or .Operation fields.\n\nNote that this processor only runs on structured data, if the record contains\nraw JSON data, then use the processor [json.decode](/docs/processors/builtin/json.decode)\nto parse it into structured data first.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "fields": { + "default": "", + "description": "Fields is a comma separated list of target fields, as they would be addressed in a Go template (e.g. 
`.Metadata,.Payload.After.foo`).", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + } + ] + } + } + }, + "examples": [ + { + "summary": "Exclude all fields in payload", + "description": "Excluding all fields in `.Payload` results in an empty payload.", + "config": { + "fields": ".Payload" + }, + "have": { + "position": null, + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": null, + "payload": { + "before": { + "bar": "baz" + }, + "after": { + "foo": "bar" + } + } + }, + "want": { + "position": null, + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": null, + "payload": { + "before": null, + "after": null + } + } + }, + { + "summary": "Exclude multiple fields", + "description": "It's possible to exclude multiple fields by providing a\ncomma-separated list of fields. In this example, we exclude .Metadata,\n.Payload.After.foo and .Key.key1.", + "config": { + "fields": ".Metadata,.Payload.After.foo,.Key.key1" + }, + "have": { + "position": null, + "operation": "create", + "metadata": { + "source": "s3" + }, + "key": { + "key1": "val1", + "key2": "val2" + }, + "payload": { + "before": { + "bar": "baz" + }, + "after": { + "foo": "bar", + "foobar": "baz" + } + } + }, + "want": { + "position": null, + "operation": "create", + "metadata": {}, + "key": { + "key2": "val2" + }, + "payload": { + "before": { + "bar": "baz" + }, + "after": { + "foobar": "baz" + } + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.rename.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.rename.json new file mode 100644 index 000000000..464ace20b --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.rename.json @@ -0,0 +1,63 @@ +{ + "specification": { + "name": "field.rename", + "summary": "Rename a group of fields.", + "description": "Rename a group of field names to new names. 
It is not\nallowed to rename top-level fields (`.Operation`, `.Position`, \n`.Key`, `.Metadata`, `.Payload.Before`, `.Payload.After`).\n\nNote that this processor only runs on structured data, if the record contains raw\nJSON data, then use the processor [json.decode](/docs/processors/builtin/json.decode)\nto parse it into structured data first.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "mapping": { + "default": "", + "description": "Mapping is a comma separated list of keys and values for fields and their new names (keys and values\nare separated by colons \":\"). For example: `.Metadata.key:id,.Payload.After.foo:bar`.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + } + ] + } + } + }, + "examples": [ + { + "summary": "Rename multiple fields", + "description": "This example renames the fields in .Metadata and\n.Payload.After as specified in the mapping configuration parameter.", + "config": { + "mapping": ".Metadata.key1:newKey,.Payload.After.foo:newFoo" + }, + "have": { + "position": null, + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": null, + "payload": { + "before": { + "bar": "baz" + }, + "after": { + "foo": "bar" + } + } + }, + "want": { + "position": null, + "operation": "create", + "metadata": { + "newKey": "val1" + }, + "key": null, + "payload": { + "before": { + "bar": "baz" + }, + "after": { + "newFoo": "bar" + } + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.set.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.set.json new file mode 100644 index 000000000..364c28877 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/field.set.json @@ -0,0 +1,137 @@ +{ + "specification": { + "name": "field.set", + "summary": "Set the value of a certain field.", + "description": "Set the value of a certain field to any value. 
It is not allowed to set the .Position field.\nThe new value can be a Go template expression, the processor will evaluate the output and assign the value to the target field.\nIf the \"field\" provided doesn't exist, the processor will create that field and assign its value.\nThis processor can be used for multiple purposes, like extracting fields, hoisting data, inserting fields, copying fields, masking fields, etc.\nNote that this processor only runs on structured data, if the record contains raw JSON data, then use the processor\n[json.decode](/docs/processors/builtin/json.decode) to parse it into structured data first.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": "", + "description": "Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`).\nNote that it is not allowed to set the .Position field.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + }, + { + "type": "exclusion", + "value": ".Position" + } + ] + }, + "value": { + "default": "", + "description": "Value is a Go template expression which will be evaluated and stored in `field` (e.g. `{{ .Payload.After }}`).", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + } + ] + } + } + }, + "examples": [ + { + "summary": "Add field", + "description": "This example adds a new field to the record. 
The field is\nadded to .Payload.After and is set to bar.", + "config": { + "field": ".Payload.After.foo", + "value": "bar" + }, + "have": { + "position": null, + "operation": "snapshot", + "metadata": null, + "key": { + "my-key": "id" + }, + "payload": { + "before": null, + "after": null + } + }, + "want": { + "position": null, + "operation": "snapshot", + "metadata": null, + "key": { + "my-key": "id" + }, + "payload": { + "before": null, + "after": { + "foo": "bar" + } + } + } + }, + { + "summary": "Sets the record operation to `update`", + "description": "This example sets the `.Operation` field to `update` for all records.", + "config": { + "field": ".Operation", + "value": "update" + }, + "have": { + "position": null, + "operation": "create", + "metadata": null, + "key": null, + "payload": { + "before": null, + "after": null + } + }, + "want": { + "position": null, + "operation": "update", + "metadata": null, + "key": null, + "payload": { + "before": null, + "after": null + } + } + }, + { + "summary": "Set field using Go template", + "description": "This example sets the `.Payload.After.postgres` field to `true` if the `.Metadata.table` field contains `postgres`.", + "config": { + "field": ".Payload.After.postgres", + "value": "{{ eq .Metadata.table \"postgres\" }}" + }, + "have": { + "position": null, + "operation": "snapshot", + "metadata": { + "table": "postgres" + }, + "key": null, + "payload": { + "before": null, + "after": { + "postgres": "false" + } + } + }, + "want": { + "position": null, + "operation": "snapshot", + "metadata": { + "table": "postgres" + }, + "key": null, + "payload": { + "before": null, + "after": { + "postgres": "true" + } + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/filter.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/filter.json new file mode 100644 index 000000000..9a9ed4342 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/filter.json @@ -0,0 
+1,34 @@ +{ + "specification": { + "name": "filter", + "summary": "Acknowledges all records that get passed to the filter.", + "description": "Acknowledges all records that get passed to the filter, so the records will be filtered out if \nthe condition provided to the processor is evaluated to \"true\".\nMake sure to add a condition to this processor, otherwise all records will be filtered out.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": {} + }, + "examples": [ + { + "summary": "Filter out the record", + "description": "", + "config": {}, + "have": { + "position": null, + "operation": "create", + "metadata": { + "key1": "val1" + }, + "key": null, + "payload": { + "before": { + "bar": "baz" + }, + "after": { + "foo": "bar" + } + } + }, + "want": {} + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/json.decode.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/json.decode.json new file mode 100644 index 000000000..fd84af6c4 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/json.decode.json @@ -0,0 +1,101 @@ +{ + "specification": { + "name": "json.decode", + "summary": "Decodes a specific field from JSON raw data (string) to structured data.", + "description": "The processor takes JSON raw data (`string` or `[]byte`)\nfrom the target field, parses it as JSON structured data and stores the decoded\nstructured data in the target field.\n\nThis processor is only applicable to fields under `.Key`, `.Payload.Before` and\n`.Payload.After`, as they can contain structured data.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": "", + "description": "Field is the target field, as it would be addressed in a Go template (e.g. 
`.Payload.After.foo`).\nyou can only decode fields that are under .Key and .Payload.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + }, + { + "type": "regex", + "value": "^\\.(Payload|Key).*" + }, + { + "type": "exclusion", + "value": ".Payload" + } + ] + } + } + }, + "examples": [ + { + "summary": "Decode record key as JSON", + "description": "This example takes a record containing a raw JSON string in\n.Key and converts it into structured data.", + "config": { + "field": ".Key" + }, + "have": { + "position": null, + "operation": "create", + "metadata": null, + "key": "{\"after\":{\"data\":4,\"id\":3}}", + "payload": { + "before": null, + "after": null + } + }, + "want": { + "position": null, + "operation": "create", + "metadata": null, + "key": { + "after": { + "data": 4, + "id": 3 + } + }, + "payload": { + "before": null, + "after": null + } + } + }, + { + "summary": "Decode nested field as JSON", + "description": "This example takes a record containing a raw JSON string in\n.Payload.Before.foo and converts it into a map.", + "config": { + "field": ".Payload.Before.foo" + }, + "have": { + "position": null, + "operation": "snapshot", + "metadata": null, + "key": null, + "payload": { + "before": { + "foo": "{\"before\":{\"data\":4,\"id\":3},\"baz\":\"bar\"}" + }, + "after": null + } + }, + "want": { + "position": null, + "operation": "snapshot", + "metadata": null, + "key": null, + "payload": { + "before": { + "foo": { + "baz": "bar", + "before": { + "data": 4, + "id": 3 + } + } + }, + "after": null + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/json.encode.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/json.encode.json new file mode 100644 index 000000000..e7b3f1da4 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/json.encode.json @@ -0,0 +1,100 @@ +{ + "specification": { + "name": "json.encode", + "summary": "Encodes a specific field from 
structured data to JSON raw data (string).", + "description": "The processor takes data from the target field, encodes it into a JSON value\nand stores the encoded value in the target field.\n\nThis processor is only applicable to fields under `.Key`, `.Payload.Before` and\n`.Payload.After`, as they can contain structured data.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": "", + "description": "Field is the target field, as it would be addressed in a Go template (e.g. `.Payload.After.foo`).\nyou can only encode fields that are under .Key, .Payload.Before and .Payload.After.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + }, + { + "type": "regex", + "value": "^\\.(Payload|Key).*" + }, + { + "type": "exclusion", + "value": ".Payload" + } + ] + } + } + }, + "examples": [ + { + "summary": "Encode nested field to JSON", + "description": "This example takes a record containing a map in\n.Payload.Before.foo and converts it into a raw JSON string.", + "config": { + "field": ".Payload.Before.foo" + }, + "have": { + "position": null, + "operation": "snapshot", + "metadata": null, + "key": null, + "payload": { + "before": { + "foo": { + "baz": "bar", + "before": { + "data": 4, + "id": 3 + } + } + }, + "after": null + } + }, + "want": { + "position": null, + "operation": "snapshot", + "metadata": null, + "key": null, + "payload": { + "before": { + "foo": "{\"baz\":\"bar\",\"before\":{\"data\":4,\"id\":3}}" + }, + "after": null + } + } + }, + { + "summary": "Encode record key to JSON", + "description": "This example takes a record containing structured data in\n.Key and converts it into a raw JSON string.", + "config": { + "field": ".Key" + }, + "have": { + "position": null, + "operation": "create", + "metadata": null, + "key": { + "tables": [ + "table1,table2" + ] + }, + "payload": { + "before": null, + "after": null + } + }, + "want": { + "position": null, + "operation": "create", + 
"metadata": null, + "key": "{\"tables\":[\"table1,table2\"]}", + "payload": { + "before": null, + "after": null + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.debezium.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.debezium.json new file mode 100644 index 000000000..de23a18f3 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.debezium.json @@ -0,0 +1,62 @@ +{ + "specification": { + "name": "unwrap.debezium", + "summary": "Unwraps a Debezium record from the input OpenCDC record.", + "description": "In this processor, the wrapped (Debezium) record replaces the wrapping record (being processed) \ncompletely, except for the position.\n\nThe Debezium record's metadata and the wrapping record's metadata is merged, with the Debezium metadata having precedence.\n\nThis is useful in cases where Conduit acts as an intermediary between a Debezium source and a Debezium destination. \nIn such cases, the Debezium record is set as the OpenCDC record's payload, and needs to be unwrapped for further usage.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": ".Payload.After", + "description": "Field is a reference to the field which contains the Debezium record.\n\nFor more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66.", + "type": "string", + "validations": [ + { + "type": "regex", + "value": "^.Payload" + } + ] + } + } + }, + "examples": [ + { + "summary": "Unwrap a Debezium record", + "description": "This example how to unwrap a Debezium record from a field nested in a record's\n.Payload.After field. 
It additionally shows how the key is unwrapped, and the metadata merged.", + "config": { + "field": ".Payload.After.nested" + }, + "have": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "metadata-key": "metadata-value" + }, + "key": "{\"payload\":\"27\"}", + "payload": { + "before": null, + "after": { + "nested": "{\n \"payload\": {\n \"after\": {\n \"description\": \"test1\",\n \"id\": 27\n },\n \"before\": null,\n \"op\": \"c\",\n \"source\": {\n \"opencdc.readAt\": \"1674061777225877000\",\n \"opencdc.version\": \"v1\"\n },\n \"transaction\": null,\n \"ts_ms\": 1674061777225\n },\n \"schema\": {}\n}" + } + } + }, + "want": { + "position": "dGVzdC1wb3NpdGlvbg==", + "operation": "create", + "metadata": { + "metadata-key": "metadata-value", + "opencdc.readAt": "1674061777225877000", + "opencdc.version": "v1" + }, + "key": "27", + "payload": { + "before": null, + "after": { + "description": "test1", + "id": 27 + } + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.kafkaconnect.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.kafkaconnect.json new file mode 100644 index 000000000..65de91474 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.kafkaconnect.json @@ -0,0 +1,64 @@ +{ + "specification": { + "name": "unwrap.kafkaconnect", + "summary": "Unwraps a Kafka Connect record from an OpenCDC record.", + "description": "This processor unwraps a Kafka Connect record from the input OpenCDC record.\n\nThe input record's payload is replaced with the Kafka Connect record.\n\nThis is useful in cases where Conduit acts as an intermediary between a Debezium source and a Debezium destination. 
\nIn such cases, the Debezium record is set as the OpenCDC record's payload, and needs to be unwrapped for further usage.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": ".Payload.After", + "description": "Field is a reference to the field which contains the Kafka Connect record.\n\nFor more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66.", + "type": "string", + "validations": [ + { + "type": "regex", + "value": "^.Payload" + } + ] + } + } + }, + "examples": [ + { + "summary": "Unwrap a Kafka Connect record", + "description": "This example shows how to unwrap a Kafka Connect record.\n\nThe Kafka Connect record is serialized as a JSON string in the `.Payload.After` field (raw data).\nThe Kafka Connect record's payload will replace the OpenCDC record's payload.\n\nWe also see how the key is unwrapped too. In this case, the key comes in as structured data.", + "config": { + "field": ".Payload.After" + }, + "have": { + "position": "dGVzdCBwb3NpdGlvbg==", + "operation": "create", + "metadata": { + "metadata-key": "metadata-value" + }, + "key": { + "payload": { + "id": 27 + }, + "schema": {} + }, + "payload": { + "before": null, + "after": "{\n\"payload\": {\n \"description\": \"test2\"\n},\n\"schema\": {}\n}" + } + }, + "want": { + "position": "dGVzdCBwb3NpdGlvbg==", + "operation": "create", + "metadata": { + "metadata-key": "metadata-value" + }, + "key": { + "id": 27 + }, + "payload": { + "before": null, + "after": { + "description": "test2" + } + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.opencdc.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.opencdc.json new file mode 100644 index 000000000..754b41207 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/unwrap.opencdc.json @@ -0,0 +1,67 @@ +{ + "specification": { + "name": 
"unwrap.opencdc", + "summary": "A processor that unwraps the OpenCDC record saved in one of record's fields.", + "description": "The unwrap.opencdc processor is useful in situations where a record goes through intermediate \nsystems before being written to a final destination. In these cases, the original OpenCDC record is part of the payload \nread from the intermediate system and needs to be unwrapped before being written.\n\nNote: if the wrapped OpenCDC record is not in a structured data field, then it's assumed that it's stored in JSON format.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "field": { + "default": ".Payload.After", + "description": "Field is a reference to the field which contains the OpenCDC record.\n\nFor more information about record references, see: https://github.com/ConduitIO/conduit-processor-sdk/blob/cbdc5dcb5d3109f8f13b88624c9e360076b0bcdb/util.go#L66.", + "type": "string", + "validations": [] + } + } + }, + "examples": [ + { + "summary": "Unwrap an OpenCDC record", + "description": "In this example we use the `unwrap.opencdc` processor to unwrap the OpenCDC record found in the record's `.Payload.After` field.", + "config": { + "field": ".Payload.After" + }, + "have": { + "position": "d3JhcHBpbmcgcG9zaXRpb24=", + "operation": "create", + "metadata": {}, + "key": "wrapping key", + "payload": { + "before": null, + "after": { + "key": { + "id": "test-key" + }, + "metadata": {}, + "operation": "update", + "payload": { + "before": null, + "after": { + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "sensor_id": 1250383582, + "triggered": false + } + }, + "position": "dGVzdC1wb3NpdGlvbg==" + } + } + }, + "want": { + "position": "d3JhcHBpbmcgcG9zaXRpb24=", + "operation": "update", + "metadata": {}, + "key": { + "id": "test-key" + }, + "payload": { + "before": null, + "after": { + "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", + "sensor_id": 1250383582, + "triggered": false + } + } + } + } + ] +} diff 
--git a/pkg/plugin/processor/builtin/internal/exampleutil/specs/webhook.http.json b/pkg/plugin/processor/builtin/internal/exampleutil/specs/webhook.http.json new file mode 100644 index 000000000..fca00e9c0 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/exampleutil/specs/webhook.http.json @@ -0,0 +1,126 @@ +{ + "specification": { + "name": "webhook.http", + "summary": "HTTP webhook processor", + "description": "A processor that sends an HTTP request to the specified URL, retries on error and \nsaves the response body and, optionally, the response status.", + "version": "v0.1.0", + "author": "Meroxa, Inc.", + "parameters": { + "backoffRetry.count": { + "default": "0", + "description": "BackoffRetryCount is the maximum number of retries for an individual record\nwhen backing off following an error.", + "type": "float", + "validations": [ + { + "type": "greater-than", + "value": "-1" + } + ] + }, + "backoffRetry.factor": { + "default": "2", + "description": "BackoffRetryFactor is the multiplying factor for each increment step.", + "type": "float", + "validations": [ + { + "type": "greater-than", + "value": "0" + } + ] + }, + "backoffRetry.max": { + "default": "5s", + "description": "BackoffRetryMax is the maximum waiting time before retrying.", + "type": "duration", + "validations": [] + }, + "backoffRetry.min": { + "default": "100ms", + "description": "BackoffRetryMin is the minimum waiting time before retrying.", + "type": "duration", + "validations": [] + }, + "request.body": { + "default": ".", + "description": "RequestBodyRef specifies which field from the input record\nshould be used as the body in the HTTP request.\nThe value of this parameter should be a valid record field reference:\nSee: sdk.NewReferenceResolver", + "type": "string", + "validations": [] + }, + "request.contentType": { + "default": "application/json", + "description": "ContentType is the value of the Content-Type header.", + "type": "string", + "validations": [] + }, + 
"request.method": { + "default": "POST", + "description": "Method is the HTTP request method to be used.", + "type": "string", + "validations": [] + }, + "request.url": { + "default": "", + "description": "URL used in the HTTP request.", + "type": "string", + "validations": [ + { + "type": "required", + "value": "" + } + ] + }, + "response.body": { + "default": ".Payload.After", + "description": "ResponseBodyRef specifies to which field should the\nresponse body be saved to.\nThe value of this parameter should be a valid record field reference:\nSee: sdk.NewReferenceResolver", + "type": "string", + "validations": [] + }, + "response.status": { + "default": "", + "description": "ResponseStatusRef specifies to which field should the\nresponse status be saved to.\nThe value of this parameter should be a valid record field reference.\nIf no value is set, then the response status will NOT be saved.\nSee: sdk.NewReferenceResolver", + "type": "string", + "validations": [] + } + } + }, + "examples": [ + { + "summary": "Send a request to an HTTP server", + "description": "\nThis example shows how to use the HTTP processor to send a record's .Payload.After field to a dummy HTTP server\nthat replies back with a greeting.\n\nThe record's .Payload.After is overwritten with the response. 
Additionally, the example shows how to store the\nvalue of the HTTP response's code in the metadata field http_status.", + "config": { + "backoffRetry.count": "0", + "backoffRetry.factor": "2", + "backoffRetry.max": "5s", + "backoffRetry.min": "100ms", + "request.body": ".Payload.After", + "request.contentType": "application/json", + "request.method": "POST", + "request.url": "http://127.0.0.1:54321", + "response.body": ".Payload.After", + "response.status": ".Metadata[\"http_status\"]" + }, + "have": { + "position": null, + "operation": "Operation(0)", + "metadata": null, + "key": null, + "payload": { + "before": null, + "after": "world" + } + }, + "want": { + "position": null, + "operation": "Operation(0)", + "metadata": { + "http_status": "200" + }, + "key": null, + "payload": { + "before": null, + "after": "hello, world" + } + } + } + ] +} diff --git a/pkg/plugin/processor/builtin/internal/references.go b/pkg/plugin/processor/builtin/internal/references.go new file mode 100644 index 000000000..dd7d33758 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/references.go @@ -0,0 +1,25 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package internal + +const ( + KeyReference = ".Key" + OperationReference = ".Operation" + MetadataReference = ".Metadata" + PositionReference = ".Position" + PayloadReference = ".Payload" + PayloadAfterReference = ".Payload.After" + PayloadBeforeReference = ".Payload.Before" +) diff --git a/pkg/plugin/processor/builtin/internal/util.go b/pkg/plugin/processor/builtin/internal/util.go new file mode 100644 index 000000000..132736cf8 --- /dev/null +++ b/pkg/plugin/processor/builtin/internal/util.go @@ -0,0 +1,37 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package internal + +import ( + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" +) + +// CmpProcessedRecordOpts is a list of options to use when comparing processed +// records. +var CmpProcessedRecordOpts = []cmp.Option{ + cmpopts.IgnoreUnexported(sdk.SingleRecord{}), + cmp.Comparer(func(e1, e2 error) bool { + switch { + case e1 == nil && e2 == nil: + return true + case e1 != nil && e2 != nil: + return e1.Error() == e2.Error() + default: + return false + } + }), +} diff --git a/pkg/plugin/processor/builtin/registry.go b/pkg/plugin/processor/builtin/registry.go new file mode 100644 index 000000000..03951fd43 --- /dev/null +++ b/pkg/plugin/processor/builtin/registry.go @@ -0,0 +1,201 @@ +// Copyright © 2023 Meroxa, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package builtin + +import ( + "context" + "reflect" + "runtime/debug" + + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/avro" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/base64" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/custom" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/field" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/json" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/unwrap" + "github.com/conduitio/conduit/pkg/plugin/processor/builtin/impl/webhook" +) + +var DefaultBuiltinProcessors = map[string]ProcessorPluginConstructor{ + "avro.decode": avro.NewDecodeProcessor, + "avro.encode": avro.NewEncodeProcessor, + "base64.decode": base64.NewDecodeProcessor, + "base64.encode": base64.NewEncodeProcessor, + "custom.javascript": custom.NewJavascriptProcessor, + "filter": impl.NewFilterProcessor, + "field.convert": field.NewConvertProcessor, + "field.exclude": field.NewExcludeProcessor, + "field.rename": field.NewRenameProcessor, + "field.set": field.NewSetProcessor, + "json.decode": json.NewDecodeProcessor, + 
"json.encode": json.NewEncodeProcessor, + "unwrap.debezium": unwrap.NewDebeziumProcessor, + "unwrap.kafkaconnect": unwrap.NewKafkaConnectProcessor, + "unwrap.opencdc": unwrap.NewOpenCDCProcessor, + "webhook.http": webhook.NewHTTPProcessor, +} + +type Registry struct { + logger log.CtxLogger + + // plugins stores plugin blueprints in a 2D map, first key is the plugin + // name, the second key is the plugin version + plugins map[string]map[string]blueprint +} + +type blueprint struct { + fullName plugin.FullName + specification sdk.Specification + constructor ProcessorPluginConstructor +} + +type ProcessorPluginConstructor func(log.CtxLogger) sdk.Processor + +func NewRegistry(logger log.CtxLogger, constructors map[string]ProcessorPluginConstructor) *Registry { + logger = logger.WithComponent("builtin.Registry") + buildInfo, ok := debug.ReadBuildInfo() + if !ok { + // we are using modules, build info should always be available, we are staying on the safe side + logger.Warn(context.Background()).Msg("build info not available, built-in plugin versions may not be read correctly") + buildInfo = &debug.BuildInfo{} // prevent nil pointer exceptions + } + + r := &Registry{ + plugins: loadPlugins(buildInfo, constructors), + logger: logger, + } + logger.Info(context.Background()).Int("count", len(r.List())).Msg("builtin plugins initialized") + return r +} + +func NewProcessorPluginConstructor(processorPlugin sdk.Processor) ProcessorPluginConstructor { + procType := reflect.TypeOf(processorPlugin) + for procType.Kind() != reflect.Struct { + procType.Elem() + } + + f := func(logger log.CtxLogger) sdk.Processor { + // TODO create processor plugin wrapper that injects logger into context + // before forwarding the call to the plugin + newProcValue := reflect.New(procType) + return newProcValue.Interface().(sdk.Processor) + } + + // try out f, to catch any panic early + f(log.CtxLogger{}) + + return f +} + +func loadPlugins(buildInfo *debug.BuildInfo, constructors 
map[string]ProcessorPluginConstructor) map[string]map[string]blueprint { + plugins := make(map[string]map[string]blueprint, len(constructors)) + for moduleName, constructor := range constructors { + specs, err := getSpecification(moduleName, constructor, buildInfo) + if err != nil { + // stop initialization if a built-in plugin is misbehaving + panic(err) + } + + versionMap := plugins[specs.Name] + if versionMap == nil { + versionMap = make(map[string]blueprint) + plugins[specs.Name] = versionMap + } + + fullName := newFullName(specs.Name, specs.Version) + if _, ok := versionMap[specs.Version]; ok { + panic(cerrors.Errorf("plugin %q already registered", fullName)) + } + + bp := blueprint{ + fullName: fullName, + constructor: constructor, + specification: specs, + } + versionMap[specs.Version] = bp + + latestBp, ok := versionMap[plugin.PluginVersionLatest] + if !ok || fullName.PluginVersionGreaterThan(latestBp.fullName) { + versionMap[plugin.PluginVersionLatest] = bp + } + } + return plugins +} + +func getSpecification(moduleName string, constructor ProcessorPluginConstructor, buildInfo *debug.BuildInfo) (sdk.Specification, error) { + procPlugin := constructor(log.CtxLogger{}) + specs, err := procPlugin.Specification() + if err != nil { + return sdk.Specification{}, err + } + + if version := getModuleVersion(buildInfo.Deps, moduleName); version != "" { + // overwrite version with the import version + specs.Version = version + } + + return specs, nil +} + +func getModuleVersion(deps []*debug.Module, moduleName string) string { + for _, dep := range deps { + if dep.Path == moduleName { + if dep.Replace != nil { + return dep.Replace.Version + } + return dep.Version + } + } + return "" +} + +func newFullName(pluginName, pluginVersion string) plugin.FullName { + return plugin.NewFullName(plugin.PluginTypeBuiltin, pluginName, pluginVersion) +} + +func (r *Registry) NewProcessor(_ context.Context, fullName plugin.FullName, _ string) (sdk.Processor, error) { + versionMap, 
ok := r.plugins[fullName.PluginName()] + if !ok { + return nil, plugin.ErrPluginNotFound + } + b, ok := versionMap[fullName.PluginVersion()] + if !ok { + availableVersions := make([]string, 0, len(versionMap)) + for k := range versionMap { + availableVersions = append(availableVersions, k) + } + return nil, cerrors.Errorf("could not find builtin plugin %q, only found versions %v: %w", fullName, availableVersions, plugin.ErrPluginNotFound) + } + + return b.constructor(r.logger), nil +} + +func (r *Registry) List() map[plugin.FullName]sdk.Specification { + specs := make(map[plugin.FullName]sdk.Specification, len(r.plugins)) + for _, versions := range r.plugins { + for version, bp := range versions { + if version == plugin.PluginVersionLatest { + continue // skip latest versions + } + specs[bp.fullName] = bp.specification + } + } + return specs +} diff --git a/pkg/plugin/processor/builtin/registry_test.go b/pkg/plugin/processor/builtin/registry_test.go new file mode 100644 index 000000000..3f9c55218 --- /dev/null +++ b/pkg/plugin/processor/builtin/registry_test.go @@ -0,0 +1,50 @@ +// Copyright © 2023 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package builtin + +import ( + "testing" + + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/processor/mock" + "github.com/matryer/is" + "go.uber.org/mock/gomock" +) + +func TestRegistry_List(t *testing.T) { + is := is.New(t) + logger := log.Nop() + + ctrl := gomock.NewController(t) + procPlugin := mock.NewProcessor(ctrl) + + procSpec := sdk.Specification{ + Name: "test-processor", + Version: "v0.1.2", + } + procPlugin.EXPECT().Specification().Return(procSpec, nil) + procConstructor := func(log.CtxLogger) sdk.Processor { return procPlugin } + + wantList := map[plugin.FullName]sdk.Specification{ + "builtin:test-processor@v0.1.2": procSpec, + } + + reg := NewRegistry(logger, map[string]ProcessorPluginConstructor{procSpec.Name: procConstructor}) + + got := reg.List() + is.Equal(got, wantList) +} diff --git a/pkg/plugin/processor/mock/processor.go b/pkg/plugin/processor/mock/processor.go new file mode 100644 index 000000000..423c85dac --- /dev/null +++ b/pkg/plugin/processor/mock/processor.go @@ -0,0 +1,126 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/conduitio/conduit-processor-sdk (interfaces: Processor) +// +// Generated by this command: +// +// mockgen -destination=mock/processor.go -package=mock -mock_names=Processor=Processor github.com/conduitio/conduit-processor-sdk Processor +// + +// Package mock is a generated GoMock package. +package mock + +import ( + context "context" + reflect "reflect" + + opencdc "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + gomock "go.uber.org/mock/gomock" +) + +// Processor is a mock of Processor interface. +type Processor struct { + sdk.UnimplementedProcessor + ctrl *gomock.Controller + recorder *ProcessorMockRecorder +} + +// ProcessorMockRecorder is the mock recorder for Processor. 
+type ProcessorMockRecorder struct { + mock *Processor +} + +// NewProcessor creates a new mock instance. +func NewProcessor(ctrl *gomock.Controller) *Processor { + mock := &Processor{ctrl: ctrl} + mock.recorder = &ProcessorMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *Processor) EXPECT() *ProcessorMockRecorder { + return m.recorder +} + +// Configure mocks base method. +func (m *Processor) Configure(arg0 context.Context, arg1 map[string]string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Configure", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// Configure indicates an expected call of Configure. +func (mr *ProcessorMockRecorder) Configure(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Configure", reflect.TypeOf((*Processor)(nil).Configure), arg0, arg1) +} + +// Open mocks base method. +func (m *Processor) Open(arg0 context.Context) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Open", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Open indicates an expected call of Open. +func (mr *ProcessorMockRecorder) Open(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Open", reflect.TypeOf((*Processor)(nil).Open), arg0) +} + +// Process mocks base method. +func (m *Processor) Process(arg0 context.Context, arg1 []opencdc.Record) []sdk.ProcessedRecord { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Process", arg0, arg1) + ret0, _ := ret[0].([]sdk.ProcessedRecord) + return ret0 +} + +// Process indicates an expected call of Process. +func (mr *ProcessorMockRecorder) Process(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Process", reflect.TypeOf((*Processor)(nil).Process), arg0, arg1) +} + +// Specification mocks base method. 
+func (m *Processor) Specification() (sdk.Specification, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Specification") + ret0, _ := ret[0].(sdk.Specification) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Specification indicates an expected call of Specification. +func (mr *ProcessorMockRecorder) Specification() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Specification", reflect.TypeOf((*Processor)(nil).Specification)) +} + +// Teardown mocks base method. +func (m *Processor) Teardown(arg0 context.Context) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Teardown", arg0) + ret0, _ := ret[0].(error) + return ret0 +} + +// Teardown indicates an expected call of Teardown. +func (mr *ProcessorMockRecorder) Teardown(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Teardown", reflect.TypeOf((*Processor)(nil).Teardown), arg0) +} + +// mustEmbedUnimplementedProcessor mocks base method. +func (m *Processor) mustEmbedUnimplementedProcessor() { + m.ctrl.T.Helper() + m.ctrl.Call(m, "mustEmbedUnimplementedProcessor") +} + +// mustEmbedUnimplementedProcessor indicates an expected call of mustEmbedUnimplementedProcessor. +func (mr *ProcessorMockRecorder) mustEmbedUnimplementedProcessor() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "mustEmbedUnimplementedProcessor", reflect.TypeOf((*Processor)(nil).mustEmbedUnimplementedProcessor)) +} diff --git a/pkg/plugin/processor/mock/registry.go b/pkg/plugin/processor/mock/registry.go new file mode 100644 index 000000000..4b343e989 --- /dev/null +++ b/pkg/plugin/processor/mock/registry.go @@ -0,0 +1,71 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: service.go +// +// Generated by this command: +// +// mockgen -source=service.go -destination=mock/registry.go -package=mock -mock_names=registry=Registry . 
registry +// + +// Package mock is a generated GoMock package. +package mock + +import ( + context "context" + reflect "reflect" + + sdk "github.com/conduitio/conduit-processor-sdk" + plugin "github.com/conduitio/conduit/pkg/plugin" + gomock "go.uber.org/mock/gomock" +) + +// Registry is a mock of registry interface. +type Registry struct { + ctrl *gomock.Controller + recorder *RegistryMockRecorder +} + +// RegistryMockRecorder is the mock recorder for Registry. +type RegistryMockRecorder struct { + mock *Registry +} + +// NewRegistry creates a new mock instance. +func NewRegistry(ctrl *gomock.Controller) *Registry { + mock := &Registry{ctrl: ctrl} + mock.recorder = &RegistryMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *Registry) EXPECT() *RegistryMockRecorder { + return m.recorder +} + +// List mocks base method. +func (m *Registry) List() map[plugin.FullName]sdk.Specification { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "List") + ret0, _ := ret[0].(map[plugin.FullName]sdk.Specification) + return ret0 +} + +// List indicates an expected call of List. +func (mr *RegistryMockRecorder) List() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*Registry)(nil).List)) +} + +// NewProcessor mocks base method. +func (m *Registry) NewProcessor(ctx context.Context, fullName plugin.FullName, id string) (sdk.Processor, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "NewProcessor", ctx, fullName, id) + ret0, _ := ret[0].(sdk.Processor) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// NewProcessor indicates an expected call of NewProcessor. 
+func (mr *RegistryMockRecorder) NewProcessor(ctx, fullName, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewProcessor", reflect.TypeOf((*Registry)(nil).NewProcessor), ctx, fullName, id) +} diff --git a/pkg/plugin/processor/processor.go b/pkg/plugin/processor/processor.go new file mode 100644 index 000000000..7e08007f7 --- /dev/null +++ b/pkg/plugin/processor/processor.go @@ -0,0 +1,21 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// This generates a mock processor and adds UnimplementedProcessor to it to +// satisfy the sdk.Processor interface. +//go:generate mockgen -destination=mock/processor.go -package=mock -mock_names=Processor=Processor github.com/conduitio/conduit-processor-sdk Processor +//go:generate sed -i.bak -e "/type Processor struct {/a\\\n sdk.UnimplementedProcessor" ./mock/processor.go +//go:generate rm ./mock/processor.go.bak + +package processor diff --git a/pkg/plugin/processor/service.go b/pkg/plugin/processor/service.go new file mode 100644 index 000000000..c6a2d0bc3 --- /dev/null +++ b/pkg/plugin/processor/service.go @@ -0,0 +1,86 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate mockgen -source=service.go -destination=mock/registry.go -package=mock -mock_names=registry=Registry . registry + +package processor + +import ( + "context" + + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin" +) + +type registry interface { + NewProcessor(ctx context.Context, fullName plugin.FullName, id string) (sdk.Processor, error) + List() map[plugin.FullName]sdk.Specification +} + +type PluginService struct { + logger log.CtxLogger + + builtinReg registry + standaloneReg registry +} + +func NewPluginService(logger log.CtxLogger, br registry, sr registry) *PluginService { + return &PluginService{ + logger: logger.WithComponent("processor.PluginService"), + builtinReg: br, + standaloneReg: sr, + } +} + +func (s *PluginService) Check(context.Context) error { + return nil +} + +func (s *PluginService) NewProcessor(ctx context.Context, pluginName string, id string) (sdk.Processor, error) { + fullName := plugin.FullName(pluginName) + switch fullName.PluginType() { + // standalone processors take precedence + // over built-in processors with the same name + case plugin.PluginTypeStandalone: + return s.standaloneReg.NewProcessor(ctx, fullName, id) + case plugin.PluginTypeBuiltin: + return s.builtinReg.NewProcessor(ctx, fullName, id) + case plugin.PluginTypeAny: + d, err := s.standaloneReg.NewProcessor(ctx, fullName, id) + if err != nil { + s.logger.Debug(ctx).Err(err).Msg("could not 
find standalone plugin dispenser, falling back to builtin plugin") + d, err = s.builtinReg.NewProcessor(ctx, fullName, id) + } + return d, err + default: + return nil, cerrors.Errorf("invalid plugin name prefix %q", fullName.PluginType()) + } +} + +func (s *PluginService) List(context.Context) (map[string]sdk.Specification, error) { + builtinSpecs := s.builtinReg.List() + standaloneSpecs := s.standaloneReg.List() + + specs := make(map[string]sdk.Specification, len(builtinSpecs)+len(standaloneSpecs)) + for k, v := range builtinSpecs { + specs[string(k)] = v + } + for k, v := range standaloneSpecs { + specs[string(k)] = v + } + + return specs, nil +} diff --git a/pkg/plugin/processor/service_test.go b/pkg/plugin/processor/service_test.go new file mode 100644 index 000000000..53c8006e7 --- /dev/null +++ b/pkg/plugin/processor/service_test.go @@ -0,0 +1,138 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package processor + +import ( + "context" + "testing" + + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin" + "github.com/conduitio/conduit/pkg/plugin/processor/mock" + "github.com/matryer/is" + "go.uber.org/mock/gomock" +) + +func TestPluginService_GetBuiltin_NotFound(t *testing.T) { + ctx := context.Background() + is := is.New(t) + ctrl := gomock.NewController(t) + + id := "test-id" + name := "builtin:test-processor" + + br := mock.NewRegistry(ctrl) + br.EXPECT(). + NewProcessor(gomock.Any(), plugin.FullName(name), id). + Return(nil, plugin.ErrPluginNotFound) + + sr := mock.NewRegistry(ctrl) + + underTest := NewPluginService(log.Nop(), br, sr) + got, err := underTest.NewProcessor(ctx, name, id) + is.True(cerrors.Is(err, plugin.ErrPluginNotFound)) + is.True(got == nil) +} + +func TestPluginService_GetStandalone_NotFound(t *testing.T) { + ctx := context.Background() + is := is.New(t) + ctrl := gomock.NewController(t) + + id := "test-id" + name := "standalone:test-processor" + + br := mock.NewRegistry(ctrl) + sr := mock.NewRegistry(ctrl) + sr.EXPECT(). + NewProcessor(gomock.Any(), plugin.FullName(name), id). 
+ Return(nil, plugin.ErrPluginNotFound) + + underTest := NewPluginService(log.Nop(), br, sr) + got, err := underTest.NewProcessor(ctx, name, id) + is.True(cerrors.Is(err, plugin.ErrPluginNotFound)) + is.True(got == nil) +} + +func TestPluginService_InvalidPluginType(t *testing.T) { + ctx := context.Background() + is := is.New(t) + ctrl := gomock.NewController(t) + + br := mock.NewRegistry(ctrl) + sr := mock.NewRegistry(ctrl) + underTest := NewPluginService(log.Nop(), br, sr) + + got, err := underTest.NewProcessor(ctx, "crunchy:test-processor", "test-id") + is.True(err != nil) + is.Equal("invalid plugin name prefix \"crunchy\"", err.Error()) + is.True(got == nil) +} + +func TestPluginService_Get(t *testing.T) { + ctx := context.Background() + + testCases := []struct { + name string + procName string + setup func(br *mock.Registry, sr *mock.Registry, proc *mock.Processor) + }{ + { + name: "get built-in", + procName: "builtin:test-processor", + setup: func(br *mock.Registry, sr *mock.Registry, proc *mock.Processor) { + br.EXPECT(). + NewProcessor(gomock.Any(), plugin.FullName("builtin:test-processor"), "test-id"). + Return(proc, nil) + }, + }, + { + name: "get standalone", + procName: "standalone:test-processor", + setup: func(br *mock.Registry, sr *mock.Registry, proc *mock.Processor) { + sr.EXPECT(). + NewProcessor(gomock.Any(), plugin.FullName("standalone:test-processor"), "test-id"). + Return(proc, nil) + }, + }, + { + name: "standalone preferred", + procName: "test-processor", + setup: func(br *mock.Registry, sr *mock.Registry, proc *mock.Processor) { + sr.EXPECT(). + NewProcessor(gomock.Any(), plugin.FullName("test-processor"), "test-id"). 
+ Return(proc, nil) + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + ctrl := gomock.NewController(t) + + want := mock.NewProcessor(ctrl) + br := mock.NewRegistry(ctrl) + sr := mock.NewRegistry(ctrl) + tc.setup(br, sr, want) + + underTest := NewPluginService(log.Nop(), br, sr) + got, err := underTest.NewProcessor(ctx, tc.procName, "test-id") + is.NoErr(err) + is.Equal(want, got) + }) + } +} diff --git a/pkg/plugin/processor/standalone/host_module.go b/pkg/plugin/processor/standalone/host_module.go new file mode 100644 index 000000000..b0e1abcc8 --- /dev/null +++ b/pkg/plugin/processor/standalone/host_module.go @@ -0,0 +1,147 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package standalone + +import ( + "context" + + processorv1 "github.com/conduitio/conduit-processor-sdk/proto/processor/v1" + "github.com/conduitio/conduit-processor-sdk/wasm" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/stealthrocket/wazergo" + "github.com/stealthrocket/wazergo/types" + "google.golang.org/protobuf/proto" +) + +// hostModule declares the host module that is exported to the WASM module. The +// host module is used to communicate between the WASM module (processor) and Conduit. 
+var hostModule wazergo.HostModule[*hostModuleInstance] = hostModuleFunctions{ + "command_request": wazergo.F1((*hostModuleInstance).commandRequest), + "command_response": wazergo.F1((*hostModuleInstance).commandResponse), +} + +// hostModuleFunctions type implements HostModule, providing the module name, +// map of exported functions, and the ability to create instances of the module +// type. +type hostModuleFunctions wazergo.Functions[*hostModuleInstance] + +// Name returns the name of the module. +func (f hostModuleFunctions) Name() string { + return "conduit" +} + +// Functions is a helper that returns the exported functions of the module. +func (f hostModuleFunctions) Functions() wazergo.Functions[*hostModuleInstance] { + return (wazergo.Functions[*hostModuleInstance])(f) +} + +// Instantiate creates a new instance of the module. This is called by the +// runtime when a new instance of the module is created. +func (f hostModuleFunctions) Instantiate(_ context.Context, opts ...hostModuleOption) (*hostModuleInstance, error) { + mod := &hostModuleInstance{} + wazergo.Configure(mod, opts...) + if mod.commandRequests == nil { + return nil, cerrors.New("missing command requests channel") + } + if mod.commandResponses == nil { + return nil, cerrors.New("missing command responses channel") + } + return mod, nil +} + +type hostModuleOption = wazergo.Option[*hostModuleInstance] + +func hostModuleOptions( + logger log.CtxLogger, + requests <-chan *processorv1.CommandRequest, + responses chan<- tuple[*processorv1.CommandResponse, error], +) hostModuleOption { + return wazergo.OptionFunc(func(m *hostModuleInstance) { + m.logger = logger + m.commandRequests = requests + m.commandResponses = responses + }) +} + +// hostModuleInstance is used to maintain the state of our module instance. 
+type hostModuleInstance struct {
+	logger           log.CtxLogger
+	commandRequests  <-chan *processorv1.CommandRequest
+	commandResponses chan<- tuple[*processorv1.CommandResponse, error]
+
+	parkedCommandRequest *processorv1.CommandRequest
+}
+
+func (*hostModuleInstance) Close(context.Context) error { return nil }
+
+// commandRequest is the exported function that is called by the WASM module to
+// get the next command request. It returns the size of the command request
+// message. If the buffer is too small, it returns the size of the command
+// request message and parks the command request. The next call to this function
+// will return the same command request.
+func (m *hostModuleInstance) commandRequest(ctx context.Context, buf types.Bytes) types.Uint32 {
+	m.logger.Trace(ctx).Msg("executing command_request")
+
+	if m.parkedCommandRequest == nil {
+		// No parked command, so we need to wait for the next one. If the command
+		// channel is closed, then we return an error.
+		var ok bool
+		m.parkedCommandRequest, ok = <-m.commandRequests
+		if !ok {
+			return wasm.ErrorCodeNoMoreCommands
+		}
+	}
+
+	// If the buffer is too small, we park the command and return the size of the
+	// command. The next call to command_request will return the same command.
+	if size := proto.Size(m.parkedCommandRequest); len(buf) < size {
+		m.logger.Warn(ctx).
+			Int("command_bytes", size).
+			Int("allocated_bytes", len(buf)).
+			Msgf("insufficient memory, command will be parked until next call to command_request")
+		return types.Uint32(size)
+	}
+
+	// If the buffer is large enough, we marshal the command into the buffer and
+	// return the size of the command. The next call to command_request will return
+	// the next command.
+ out, err := proto.MarshalOptions{}.MarshalAppend(buf[:0], m.parkedCommandRequest) + if err != nil { + m.logger.Err(ctx, err).Msg("failed marshalling protobuf command request") + return wasm.ErrorCodeUnknownCommandRequest + } + m.parkedCommandRequest = nil + + m.logger.Trace(ctx).Msg("returning next command") + return types.Uint32(len(out)) +} + +// commandResponse is the exported function that is called by the WASM module to +// send a command response. It returns 0 on success, or an error code on error. +func (m *hostModuleInstance) commandResponse(ctx context.Context, buf types.Bytes) types.Uint32 { + m.logger.Trace(ctx).Msg("executing command_response") + + var resp processorv1.CommandResponse + err := proto.Unmarshal(buf, &resp) + if err != nil { + m.logger.Err(ctx, err).Msg("failed unmarshalling protobuf command response") + m.commandResponses <- tuple[*processorv1.CommandResponse, error]{nil, err} + return wasm.ErrorCodeUnknownCommandResponse + } + + m.commandResponses <- tuple[*processorv1.CommandResponse, error]{&resp, nil} + return 0 +} diff --git a/pkg/plugin/processor/standalone/logger.go b/pkg/plugin/processor/standalone/logger.go new file mode 100644 index 000000000..eabc3610d --- /dev/null +++ b/pkg/plugin/processor/standalone/logger.go @@ -0,0 +1,103 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package standalone + +import ( + "io" + + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/tetratelabs/wazero" + + "github.com/goccy/go-json" + "github.com/rs/zerolog" +) + +// wasmLogWriter is a logger adapter for the WASM stderr and stdout streams. +// It parses the JSON log events and emits them as structured logs. It expects +// the log events to be in the default format produced by zerolog. If the +// parsing fails, it falls back to writing the raw bytes as-is. +type wasmLogWriter struct { + logger zerolog.Logger +} + +var _ io.Writer = (*wasmLogWriter)(nil) + +func newWasmLogWriter(logger log.CtxLogger, module wazero.CompiledModule) wasmLogWriter { + name := module.Name() + if name == "" { + // no module name, use the component name instead + name = logger.Component() + ".module" + } + logger = logger.WithComponent(name) + return wasmLogWriter{logger: logger.ZerologWithComponent()} +} + +func (l wasmLogWriter) Write(p []byte) (int, error) { + err := l.emitJSONEvent(p) + if err != nil { + // fallback to writing the bytes as-is + return l.logger.Write(p) + } + return len(p), nil +} + +func (l wasmLogWriter) emitJSONEvent(p []byte) error { + var raw map[string]any + err := json.Unmarshal(p, &raw) + if err != nil { + return err + } + + var ( + level = zerolog.DebugLevel // default + msg = "" + ) + + // parse level + if v, ok := raw[zerolog.LevelFieldName]; ok { + delete(raw, zerolog.LevelFieldName) + if s, ok := v.(string); ok { + parsedLvl, err := zerolog.ParseLevel(s) + if err == nil { + level = parsedLvl + } + } + } + + // prepare log event + e := l.logger.WithLevel(level) + if !e.Enabled() { + return nil + } + + // parse message + if v, ok := raw[zerolog.MessageFieldName]; ok { + delete(raw, zerolog.MessageFieldName) + if s, ok := v.(string); ok { + msg = s + } + } + + // don't duplicate timestamp, it's added by zerolog + delete(raw, zerolog.TimestampFieldName) + + // parse unknown fields + for k, v := range raw { + e.Any(k, v) + } + + 
e.Msg(msg) + return nil +} diff --git a/pkg/plugin/processor/standalone/processor.go b/pkg/plugin/processor/standalone/processor.go new file mode 100644 index 000000000..b0a12ff12 --- /dev/null +++ b/pkg/plugin/processor/standalone/processor.go @@ -0,0 +1,360 @@ +// Copyright © 2023 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package standalone + +import ( + "context" + "fmt" + "time" + + "github.com/conduitio/conduit/pkg/plugin" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + processorv1 "github.com/conduitio/conduit-processor-sdk/proto/processor/v1" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/stealthrocket/wazergo" + "github.com/tetratelabs/wazero" + "github.com/tetratelabs/wazero/api" + "github.com/tetratelabs/wazero/sys" +) + +const ( + // magicCookieKey and value are used as a very basic verification + // that a plugin is intended to be launched. This is not a security + // measure, just a UX feature. If the magic cookie doesn't match, + // we show human-friendly output. 
+ magicCookieKey = "CONDUIT_MAGIC_COOKIE" + magicCookieValue = "3stnegqd0x02axggy0vrc4izjeq2zik6g7somyb3ye4vy5iivvjm5s1edppl5oja" + + conduitProcessorIDKey = "CONDUIT_PROCESSOR_ID" + conduitLogLevelKey = "CONDUIT_LOG_LEVEL" +) + +type wasmProcessor struct { + sdk.UnimplementedProcessor + protoconv protoConverter + + id string + logger log.CtxLogger + + // module is the WASM module that implements the processor + module api.Module + + // commandRequests is used to send commands to the actual processor (the + // WASM module) + commandRequests chan *processorv1.CommandRequest + // commandResponses is used to communicate replies between the actual + // processor (the WASM module) and wasmProcessor + commandResponses chan tuple[*processorv1.CommandResponse, error] + + // moduleStopped is used to know when the module stopped running + moduleStopped chan struct{} + // moduleError contains the error returned by the module after it stopped + moduleError error +} + +type tuple[T1, T2 any] struct { + V1 T1 + V2 T2 +} + +func newWASMProcessor( + ctx context.Context, + + runtime wazero.Runtime, + processorModule wazero.CompiledModule, + hostModule *wazergo.CompiledModule[*hostModuleInstance], + + id string, + logger log.CtxLogger, +) (*wasmProcessor, error) { + logger = logger.WithComponent("standalone.wasmProcessor") + logger.Logger = logger.With().Str(log.ProcessorIDField, id).Logger() + wasmLogger := newWasmLogWriter(logger, processorModule) + + commandRequests := make(chan *processorv1.CommandRequest) + commandResponses := make(chan tuple[*processorv1.CommandResponse, error]) + moduleStopped := make(chan struct{}) + + // instantiate conduit host module and inject it into the context + logger.Debug(ctx).Msg("instantiating conduit host module") + ins, err := hostModule.Instantiate( + ctx, + hostModuleOptions( + logger, + commandRequests, + commandResponses, + ), + ) + if err != nil { + return nil, fmt.Errorf("failed to instantiate conduit host module: %w", err) + } + ctx = 
wazergo.WithModuleInstance(ctx, ins) + + logger.Debug(ctx).Msg("instantiating processor module") + mod, err := runtime.InstantiateModule( + ctx, + processorModule, + wazero.NewModuleConfig(). + WithName(id). // ensure unique module name + WithEnv(magicCookieKey, magicCookieValue). + WithEnv(conduitProcessorIDKey, id). + WithEnv(conduitLogLevelKey, logger.GetLevel().String()). + + // set up logging + WithStdout(wasmLogger). + WithStderr(wasmLogger). + + // enable time.Now to include correct wall time + WithSysWalltime(). + // enable time.Now to include correct monotonic time + WithSysNanotime(). + // enable time.Sleep to sleep for the correct amount of time + WithSysNanosleep(). + + // don't start right away + WithStartFunctions(), + ) + if err != nil { + return nil, fmt.Errorf("failed to instantiate processor module: %w", err) + } + + p := &wasmProcessor{ + id: id, + logger: logger, + module: mod, + commandRequests: commandRequests, + commandResponses: commandResponses, + moduleStopped: moduleStopped, + } + + // Needs to run in a goroutine because the WASM module is blocking as long + // as the "main" function is running + go p.run(ctx) + + return p, nil +} + +// run is the main loop of the WASM module. It runs in a goroutine and blocks +// until the module is closed. 
+func (p *wasmProcessor) run(ctx context.Context) { + defer close(p.moduleStopped) + + _, err := p.module.ExportedFunction("_start").Call(ctx) + + // main function returned, close the module right away + _ = p.module.Close(ctx) + + if err != nil { + var exitErr *sys.ExitError + if cerrors.As(err, &exitErr) { + if exitErr.ExitCode() == 0 { // All good + err = nil + } + } + } + + p.moduleError = err + p.logger.Err(ctx, err).Msg("WASM module stopped") +} + +func (p *wasmProcessor) Specification() (sdk.Specification, error) { + req := &processorv1.CommandRequest{ + Request: &processorv1.CommandRequest_Specify{ + Specify: &processorv1.Specify_Request{}, + }, + } + + // the function has no context parameter, so we need to set a timeout + ctx, cancel := context.WithTimeout(context.Background(), time.Second*5) + defer cancel() + + resp, err := p.executeCommand(ctx, req) + if err != nil { + return sdk.Specification{}, err + } + + switch specResp := resp.Response.(type) { + case *processorv1.CommandResponse_Specify: + return p.protoconv.specification(specResp.Specify) + default: + return sdk.Specification{}, fmt.Errorf("unexpected response type: %T", resp) + } +} + +func (p *wasmProcessor) Configure(ctx context.Context, config map[string]string) error { + req := &processorv1.CommandRequest{ + Request: &processorv1.CommandRequest_Configure{ + Configure: &processorv1.Configure_Request{ + Parameters: config, + }, + }, + } + + resp, err := p.executeCommand(ctx, req) + if err != nil { + return err + } + + switch resp.Response.(type) { + case *processorv1.CommandResponse_Configure: + return nil + default: + return fmt.Errorf("unexpected response type: %T", resp) + } +} + +func (p *wasmProcessor) Open(ctx context.Context) error { + req := &processorv1.CommandRequest{ + Request: &processorv1.CommandRequest_Open{ + Open: &processorv1.Open_Request{}, + }, + } + + resp, err := p.executeCommand(ctx, req) + if err != nil { + return err + } + + switch resp.Response.(type) { + case 
*processorv1.CommandResponse_Open: + return nil + default: + return fmt.Errorf("unexpected response type: %T", resp) + } +} + +func (p *wasmProcessor) Process(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + protoRecords, err := p.protoconv.records(records) + if err != nil { + p.logger.Err(ctx, err).Msg("failed to convert records to proto") + return []sdk.ProcessedRecord{sdk.ErrorRecord{Error: err}} + } + + req := &processorv1.CommandRequest{ + Request: &processorv1.CommandRequest_Process{ + Process: &processorv1.Process_Request{ + Records: protoRecords, + }, + }, + } + + resp, err := p.executeCommand(ctx, req) + if err != nil { + return []sdk.ProcessedRecord{sdk.ErrorRecord{Error: err}} + } + + switch procResp := resp.Response.(type) { + case *processorv1.CommandResponse_Process: + processedRecords, err := p.protoconv.processedRecords(procResp.Process.Records) + if err != nil { + p.logger.Err(ctx, err).Msg("failed to convert processed records from proto") + return []sdk.ProcessedRecord{sdk.ErrorRecord{Error: err}} + } + return processedRecords + default: + err := fmt.Errorf("unexpected response type: %T", resp) + return []sdk.ProcessedRecord{sdk.ErrorRecord{Error: err}} + } +} + +func (p *wasmProcessor) Teardown(ctx context.Context) error { + // TODO: we should probably have a timeout for the teardown command in case + // the plugin is stuck + teardownErr := p.executeTeardownCommand(ctx) + // close module regardless of teardown error + stopErr := p.closeModule(ctx) + + return cerrors.Join(teardownErr, stopErr, p.moduleError) +} + +func (p *wasmProcessor) executeTeardownCommand(ctx context.Context) error { + req := &processorv1.CommandRequest{ + Request: &processorv1.CommandRequest_Teardown{ + Teardown: &processorv1.Teardown_Request{}, + }, + } + + resp, err := p.executeCommand(ctx, req) + + if err != nil { + return err + } + switch resp.Response.(type) { + case *processorv1.CommandResponse_Teardown: + return nil + default: + return 
fmt.Errorf("unexpected response type: %T", resp) + } +} + +func (p *wasmProcessor) closeModule(ctx context.Context) error { + // Closing the command channel will send an error code to the WASM module + // signaling it to exit. + close(p.commandRequests) + + select { + case <-ctx.Done(): + // kill the plugin + p.logger.Error(ctx).Msg("context canceled while waiting for teardown, killing plugin") + err := p.module.CloseWithExitCode(ctx, 1) + if err != nil { + return fmt.Errorf("failed to kill processor plugin: %w", err) + } + return ctx.Err() + case <-p.moduleStopped: + return nil + } +} + +// executeCommand sends a command request to the WASM module and waits for the +// response. It returns the response, or an error if the response is an error. +// If the context is canceled, it returns ctx.Err(). +func (p *wasmProcessor) executeCommand(ctx context.Context, req *processorv1.CommandRequest) (*processorv1.CommandResponse, error) { + select { + case <-ctx.Done(): + return nil, ctx.Err() + case <-p.moduleStopped: + return nil, cerrors.Errorf("processor plugin stopped while trying to send command %T: %w", req.Request, plugin.ErrPluginNotRunning) + case p.commandRequests <- req: + } + + // wait for the response from the WASM module + var resp *processorv1.CommandResponse + var err error + select { + case <-ctx.Done(): + // TODO if this happens we should probably kill the plugin, as it's + // probably stuck + return nil, ctx.Err() + case <-p.moduleStopped: + return nil, cerrors.Errorf("processor plugin stopped while waiting for response to command %T: %w", req.Request, plugin.ErrPluginNotRunning) + case crTuple := <-p.commandResponses: + resp, err = crTuple.V1, crTuple.V2 + } + + if err != nil { + return nil, err + } + + // check if the response is an error + if errResp, ok := resp.Response.(*processorv1.CommandResponse_Error); ok { + return nil, p.protoconv.error(errResp.Error) + } + + return resp, nil +} diff --git a/pkg/plugin/processor/standalone/processor_test.go 
b/pkg/plugin/processor/standalone/processor_test.go new file mode 100644 index 000000000..ffcb49a69 --- /dev/null +++ b/pkg/plugin/processor/standalone/processor_test.go @@ -0,0 +1,238 @@ +// Copyright © 2023 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package standalone + +import ( + "context" + "testing" + + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/plugin" + + "github.com/conduitio/conduit-commons/opencdc" + + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit-processor-sdk/wasm" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/matryer/is" +) + +func TestWASMProcessor_Specification_Success(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + gotSpec, err := underTest.Specification() + is.NoErr(err) + + wantSpec := ChaosProcessorSpecifications() + is.Equal(gotSpec, wantSpec) + + is.NoErr(underTest.Teardown(ctx)) +} + +func TestWASMProcessor_Specification_Error(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, SpecifyErrorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + _, err = underTest.Specification() + is.Equal(err, wasm.NewError(0, "boom")) + + 
// Teardown still works + is.NoErr(underTest.Teardown(ctx)) +} + +func TestWASMProcessor_Configure_Success(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + err = underTest.Configure(ctx, nil) + is.NoErr(err) + + is.NoErr(underTest.Teardown(ctx)) +} + +func TestWASMProcessor_Configure_Error(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + err = underTest.Configure(ctx, map[string]string{"configure": "error"}) + is.Equal(err, wasm.NewError(0, "boom")) + + // Teardown still works + is.NoErr(underTest.Teardown(ctx)) +} + +func TestWASMProcessor_Configure_Panic(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + err = underTest.Configure(ctx, map[string]string{"configure": "panic"}) + is.True(cerrors.Is(err, plugin.ErrPluginNotRunning)) + + // Teardown should also fail with the same error + err = underTest.Teardown(ctx) + is.True(cerrors.Is(err, plugin.ErrPluginNotRunning)) +} + +func TestWASMProcessor_Open_Success(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + err = underTest.Open(ctx) + is.NoErr(err) + + is.NoErr(underTest.Teardown(ctx)) +} + +func TestWASMProcessor_Open_Error(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, 
CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + err = underTest.Configure(ctx, map[string]string{"open": "error"}) + is.NoErr(err) + + err = underTest.Open(ctx) + is.Equal(err, wasm.NewError(0, "boom")) + + // Teardown still works + is.NoErr(underTest.Teardown(ctx)) +} + +func TestWASMProcessor_Open_Panic(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + err = underTest.Configure(ctx, map[string]string{"open": "panic"}) + is.NoErr(err) + + err = underTest.Open(ctx) + is.True(cerrors.Is(err, plugin.ErrPluginNotRunning)) + + // Teardown should also fail with the same error + err = underTest.Teardown(ctx) + is.True(cerrors.Is(err, plugin.ErrPluginNotRunning)) +} + +func TestWASMProcessor_Process_Success(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + is.NoErr(underTest.Configure(ctx, map[string]string{"process.prefix": "hello!\n\n"})) + + processed := underTest.Process(ctx, nil) + is.Equal(0, len(processed)) + + processed = underTest.Process(ctx, []opencdc.Record{}) + is.Equal(0, len(processed)) + + input := opencdc.Record{ + Position: opencdc.Position("first left then right"), + Operation: opencdc.OperationCreate, + Metadata: map[string]string{ + "street": "23rd", + }, + Key: opencdc.RawData("broken"), + Payload: opencdc.Change{ + After: opencdc.RawData("oranges"), + }, + } + want := sdk.SingleRecord(input.Clone()) + want.Payload.After = opencdc.RawData("hello!\n\n" + string(want.Payload.After.Bytes())) + + processed = underTest.Process(ctx, []opencdc.Record{input}) + is.Equal(1, len(processed)) + is.Equal(want, processed[0]) + + is.NoErr(underTest.Teardown(ctx)) +} + +func 
TestWASMProcessor_Process_Error(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + is.NoErr(underTest.Configure(ctx, map[string]string{"process": "error"})) + + processed := underTest.Process(ctx, nil) + is.Equal(1, len(processed)) + + errRecord, ok := processed[0].(sdk.ErrorRecord) + is.True(ok) + is.Equal(errRecord.Error, wasm.NewError(0, "boom")) + + // Teardown still works + is.NoErr(underTest.Teardown(ctx)) +} + +func TestWASMProcessor_Process_Panic(t *testing.T) { + is := is.New(t) + ctx := context.Background() + logger := log.Test(t) + + underTest, err := newWASMProcessor(ctx, TestRuntime, ChaosProcessorModule, CompiledHostModule, "test-processor", logger) + is.NoErr(err) + + is.NoErr(underTest.Configure(ctx, map[string]string{"process": "panic"})) + + processed := underTest.Process(ctx, nil) + is.Equal(1, len(processed)) + + errRecord, ok := processed[0].(sdk.ErrorRecord) + is.True(ok) + is.True(cerrors.Is(errRecord.Error, plugin.ErrPluginNotRunning)) + + // Teardown should also fail with the same error + err = underTest.Teardown(ctx) + is.True(cerrors.Is(err, plugin.ErrPluginNotRunning)) +} diff --git a/pkg/plugin/processor/standalone/proto.go b/pkg/plugin/processor/standalone/proto.go new file mode 100644 index 000000000..4ffceabee --- /dev/null +++ b/pkg/plugin/processor/standalone/proto.go @@ -0,0 +1,126 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package standalone + +import ( + "fmt" + + "github.com/conduitio/conduit-commons/config" + "github.com/conduitio/conduit-commons/opencdc" + opencdcv1 "github.com/conduitio/conduit-commons/proto/opencdc/v1" + sdk "github.com/conduitio/conduit-processor-sdk" + processorv1 "github.com/conduitio/conduit-processor-sdk/proto/processor/v1" + "github.com/conduitio/conduit-processor-sdk/wasm" +) + +// protoConverter converts between the SDK and protobuf types. +type protoConverter struct{} + +func (c protoConverter) specification(resp *processorv1.Specify_Response) (sdk.Specification, error) { + params := make(config.Parameters, len(resp.Parameters)) + err := params.FromProto(resp.Parameters) + if err != nil { + return sdk.Specification{}, err + } + + return sdk.Specification{ + Name: resp.Name, + Summary: resp.Summary, + Description: resp.Description, + Version: resp.Version, + Author: resp.Author, + Parameters: params, + }, nil +} + +func (c protoConverter) records(in []opencdc.Record) ([]*opencdcv1.Record, error) { + if in == nil { + return nil, nil + } + + out := make([]*opencdcv1.Record, len(in)) + for i, r := range in { + out[i] = &opencdcv1.Record{} + err := r.ToProto(out[i]) + if err != nil { + return nil, err + } + } + + return out, nil +} + +func (c protoConverter) processedRecords(in []*processorv1.Process_ProcessedRecord) ([]sdk.ProcessedRecord, error) { + if in == nil { + return nil, nil + } + + out := make([]sdk.ProcessedRecord, len(in)) + var err error + for i, r := range in { + out[i], err = c.processedRecord(r) + if err != nil { + return 
nil, err + } + } + + return out, nil +} + +func (c protoConverter) processedRecord(in *processorv1.Process_ProcessedRecord) (sdk.ProcessedRecord, error) { + if in == nil || in.Record == nil { + return nil, nil + } + + switch v := in.Record.(type) { + case *processorv1.Process_ProcessedRecord_SingleRecord: + return c.singleRecord(v) + case *processorv1.Process_ProcessedRecord_FilterRecord: + return c.filterRecord(v) + case *processorv1.Process_ProcessedRecord_ErrorRecord: + return c.errorRecord(v) + default: + return nil, fmt.Errorf("unknown processed record type: %T", in.Record) + } +} + +func (c protoConverter) singleRecord(in *processorv1.Process_ProcessedRecord_SingleRecord) (sdk.SingleRecord, error) { + if in == nil { + return sdk.SingleRecord{}, nil + } + + var rec opencdc.Record + err := rec.FromProto(in.SingleRecord) + if err != nil { + return sdk.SingleRecord{}, err + } + + return sdk.SingleRecord(rec), nil +} + +func (c protoConverter) filterRecord(_ *processorv1.Process_ProcessedRecord_FilterRecord) (sdk.FilterRecord, error) { + return sdk.FilterRecord{}, nil +} + +func (c protoConverter) errorRecord(in *processorv1.Process_ProcessedRecord_ErrorRecord) (sdk.ErrorRecord, error) { + if in == nil || in.ErrorRecord == nil || in.ErrorRecord.Error == nil { + return sdk.ErrorRecord{}, nil + } + return sdk.ErrorRecord{Error: c.error(in.ErrorRecord.Error)}, nil +} + +func (c protoConverter) error(e *processorv1.Error) error { + return wasm.NewError(e.Code, e.Message) +} diff --git a/pkg/plugin/processor/standalone/registry.go b/pkg/plugin/processor/standalone/registry.go new file mode 100644 index 000000000..162473d63 --- /dev/null +++ b/pkg/plugin/processor/standalone/registry.go @@ -0,0 +1,285 @@ +// Copyright © 2023 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package standalone + +import ( + "context" + "fmt" + "os" + "path" + "path/filepath" + "sync" + + "github.com/tetratelabs/wazero/imports/wasi_snapshot_preview1" + + "github.com/stealthrocket/wazergo" + + "github.com/tetratelabs/wazero" + + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin" +) + +// Registry is a directory registry of processor plugins, organized by plugin +// type, name and version. +// Every file in the specified directory is considered a plugin +// (directories are skipped). +type Registry struct { + logger log.CtxLogger + pluginDir string + runtime wazero.Runtime + + // hostModule is the conduit host module that exposes Conduit host functions + // to the WASM module. The host module is compiled once and instantiated + // multiple times, once for each WASM module. 
+ hostModule *wazergo.CompiledModule[*hostModuleInstance] + + // plugins stores plugin blueprints in a 2D map, first key is the plugin + // name, the second key is the plugin version + plugins map[string]map[string]blueprint + // m guards plugins from being concurrently accessed + m sync.RWMutex +} + +type blueprint struct { + fullName plugin.FullName + specification sdk.Specification + path string + module wazero.CompiledModule + // TODO store hash of plugin binary and compare before running the binary to + // ensure someone can't switch the plugin after we registered it +} + +func NewRegistry(logger log.CtxLogger, pluginDir string) (*Registry, error) { + // context is only used for logging, it's not used for long running operations + ctx := context.Background() + + logger = logger.WithComponentFromType(Registry{}) + + if pluginDir != "" { + // extract absolute path to make it clearer in the logs what directory is used + absPluginDir, err := filepath.Abs(pluginDir) + if err != nil { + logger.Warn(ctx).Err(err).Msg("could not extract absolute processor plugins path") + } else { + pluginDir = absPluginDir + } + } + + // we are using the wasm compiler, context is not used + runtime := wazero.NewRuntime(ctx) + // TODO close runtime on shutdown + + _, err := wasi_snapshot_preview1.Instantiate(ctx, runtime) + if err != nil { + _ = runtime.Close(ctx) + return nil, cerrors.Errorf("failed to instantiate WASI: %w", err) + } + + // init host module + compiledHostModule, err := wazergo.Compile(ctx, runtime, hostModule) + if err != nil { + _ = runtime.Close(ctx) + return nil, cerrors.Errorf("failed to compile host module: %w", err) + } + + r := &Registry{ + logger: logger, + runtime: runtime, + hostModule: compiledHostModule, + pluginDir: pluginDir, + } + + r.reloadPlugins() + r.logger.Info(context.Background()). + Str(log.PluginPathField, r.pluginDir). + Int("count", len(r.List())). 
+ Msg("standalone processor plugins initialized") + + return r, nil +} + +func (r *Registry) NewProcessor(ctx context.Context, fullName plugin.FullName, id string) (sdk.Processor, error) { + r.m.RLock() + defer r.m.RUnlock() + + versions, ok := r.plugins[fullName.PluginName()] + if !ok { + return nil, plugin.ErrPluginNotFound + } + bp, ok := versions[fullName.PluginVersion()] + if !ok { + availableVersions := make([]string, 0, len(versions)) + for k := range versions { + availableVersions = append(availableVersions, k) + } + return nil, cerrors.Errorf("could not find standalone processor plugin, only found versions %v: %w", availableVersions, plugin.ErrPluginNotFound) + } + + p, err := newWASMProcessor(ctx, r.runtime, bp.module, r.hostModule, id, r.logger) + if err != nil { + return nil, cerrors.Errorf("failed to create a new WASM processor: %w", err) + } + + return p, nil +} + +func (r *Registry) reloadPlugins() { + if r.pluginDir == "" { + return // no plugin dir, no plugins to load + } + + plugins := r.loadPlugins(context.Background(), r.pluginDir) + r.m.Lock() + r.plugins = plugins + r.m.Unlock() +} + +func (r *Registry) loadPlugins(ctx context.Context, pluginDir string) map[string]map[string]blueprint { + r.logger.Info(ctx).Msgf("loading processor plugins from directory %v ...", pluginDir) + plugins := make(map[string]map[string]blueprint) + + dirEntries, err := os.ReadDir(pluginDir) + if err != nil { + r.logger.Warn(ctx).Err(err).Msg("could not read processor plugin directory") + return plugins // return empty map + } + warn := func(ctx context.Context, err error, pluginPath string) { + r.logger.Warn(ctx). + Err(err). + Str(log.PluginPathField, pluginPath). 
+ Msgf("could not load standalone processor plugin") + } + + for _, dirEntry := range dirEntries { + if dirEntry.IsDir() { + // skip directories + continue + } + + pluginPath := path.Join(pluginDir, dirEntry.Name()) + + // create dispenser without a logger to not spam logs on refresh + module, specs, err := r.loadModuleAndSpecifications(ctx, pluginPath) + if err != nil { + warn(ctx, err, pluginPath) + continue + } + + versionMap := plugins[specs.Name] + if versionMap == nil { + versionMap = make(map[string]blueprint) + plugins[specs.Name] = versionMap + } + + fullName := plugin.NewFullName(plugin.PluginTypeStandalone, specs.Name, specs.Version) + if conflict, ok := versionMap[specs.Version]; ok { + err = cerrors.Errorf("conflict detected, processor plugin %v already registered, please remove either %v or %v, these plugins won't be usable until that happens", fullName, conflict.path, pluginPath) + warn(ctx, err, pluginPath) + // close module as we won't use it + _ = module.Close(ctx) + // delete plugin from map at the end so that further duplicates can + // still be found + defer func() { + delete(versionMap, specs.Version) + if len(versionMap) == 0 { + delete(plugins, specs.Name) + } + }() + continue + } + + bp := blueprint{ + fullName: fullName, + specification: specs, + path: pluginPath, + module: module, + } + versionMap[specs.Version] = bp + + latestFullName := versionMap[plugin.PluginVersionLatest].fullName + if fullName.PluginVersionGreaterThan(latestFullName) { + versionMap[plugin.PluginVersionLatest] = bp + r.logger.Debug(ctx). + Str(log.PluginPathField, pluginPath). + Str(log.PluginNameField, string(bp.fullName)). + Msg("set processor plugin as latest") + } + + r.logger.Debug(ctx). + Str(log.PluginPathField, pluginPath). + Str(log.PluginNameField, string(bp.fullName)). 
+ Msg("loaded standalone processor plugin") + } + + return plugins +} + +func (r *Registry) loadModuleAndSpecifications(ctx context.Context, pluginPath string) (_ wazero.CompiledModule, _ sdk.Specification, err error) { + wasmBytes, err := os.ReadFile(pluginPath) + if err != nil { + return nil, sdk.Specification{}, fmt.Errorf("failed to read WASM file %q: %w", pluginPath, err) + } + + r.logger.Debug(ctx). + Str("path", pluginPath). + Msg("compiling WASM module") + + module, err := r.runtime.CompileModule(ctx, wasmBytes) + if err != nil { + return nil, sdk.Specification{}, fmt.Errorf("failed to compile WASM module: %w", err) + } + defer func() { + if err != nil { + _ = module.Close(ctx) + } + }() + + p, err := newWASMProcessor(ctx, r.runtime, module, r.hostModule, "init-processor", log.Nop()) + if err != nil { + return nil, sdk.Specification{}, fmt.Errorf("failed to create a new WASM processor: %w", err) + } + defer func() { + err := p.Teardown(ctx) + if err != nil { + r.logger.Warn(ctx).Err(err).Msg("processor teardown failed") + } + }() + + specs, err := p.Specification() + if err != nil { + return nil, sdk.Specification{}, err + } + + return module, specs, nil +} + +func (r *Registry) List() map[plugin.FullName]sdk.Specification { + r.m.RLock() + defer r.m.RUnlock() + + specs := make(map[plugin.FullName]sdk.Specification, len(r.plugins)) + for _, versions := range r.plugins { + for version, bp := range versions { + if version == plugin.PluginVersionLatest { + continue // skip latest versions + } + specs[bp.fullName] = bp.specification + } + } + return specs +} diff --git a/pkg/plugin/processor/standalone/registry_test.go b/pkg/plugin/processor/standalone/registry_test.go new file mode 100644 index 000000000..702e9f258 --- /dev/null +++ b/pkg/plugin/processor/standalone/registry_test.go @@ -0,0 +1,141 @@ +// Copyright © 2023 Meroxa, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package standalone + +import ( + "context" + "fmt" + "testing" + "time" + + "github.com/conduitio/conduit-commons/csync" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/plugin" + "github.com/google/uuid" + "github.com/matryer/is" +) + +func TestRegistry_List(t *testing.T) { + is := is.New(t) + + underTest, err := NewRegistry(log.Test(t), testPluginChaosDir) + is.NoErr(err) + list := underTest.List() + is.Equal(1, len(list)) + got, ok := list["standalone:chaos-processor@v1.3.5"] + is.True(ok) // expected spec for standalone:chaos-processor@v1.3.5 + + want := ChaosProcessorSpecifications() + + is.Equal(got, want) +} + +func TestRegistry_MalformedProcessor(t *testing.T) { + is := is.New(t) + + underTest, err := NewRegistry(log.Test(t), testPluginMalformedDir) + is.NoErr(err) + list := underTest.List() + is.Equal(0, len(list)) +} + +func TestRegistry_SpecifyError(t *testing.T) { + is := is.New(t) + + underTest, err := NewRegistry(log.Test(t), testPluginSpecifyErrorDir) + is.NoErr(err) + list := underTest.List() + is.Equal(0, len(list)) +} + +func TestRegistry_ChaosProcessor(t *testing.T) { + ctx := context.Background() + is := is.New(t) + + // reuse this registry for multiple tests, because it's expensive to create + underTest, err := NewRegistry(log.Nop(), 
testPluginChaosDir) + is.NoErr(err) + + const standaloneProcessorName = plugin.FullName("standalone:chaos-processor@v1.3.5") + + t.Run("List", func(t *testing.T) { + is := is.New(t) + + list := underTest.List() + is.Equal(1, len(list)) + + got, ok := list[standaloneProcessorName] + is.True(ok) + + want := ChaosProcessorSpecifications() + is.Equal(got, want) + }) + + t.Run("NewProcessor", func(t *testing.T) { + is := is.New(t) + + p, err := underTest.NewProcessor(ctx, standaloneProcessorName, "test-processor") + is.NoErr(err) + + got, err := p.Specification() + is.NoErr(err) + + want := ChaosProcessorSpecifications() + is.Equal(got, want) + + is.NoErr(p.Teardown(ctx)) + }) + + t.Run("ConcurrentProcessors", func(t *testing.T) { + const ( + // spawn 50 processors, each processing 50 records simultaneously + processorCount = 50 + recordCount = 50 + ) + + var wg csync.WaitGroup + for i := 0; i < processorCount; i++ { + wg.Add(1) + go func(i int) { + defer wg.Done() + p, err := underTest.NewProcessor(ctx, "standalone:chaos-processor@v1.3.5", fmt.Sprintf("test-processor-%d", i)) + is.NoErr(err) + + err = p.Configure(ctx, map[string]string{"process.prefix": fmt.Sprintf("%d", i)}) + is.NoErr(err) + + rec := opencdc.Record{ + Payload: opencdc.Change{ + After: opencdc.RawData(uuid.NewString()), + }, + } + want := rec.Clone() + want.Payload.After = opencdc.RawData(fmt.Sprintf("%d", i) + string(want.Payload.After.Bytes())) + + for i := 0; i < recordCount; i++ { + got := p.Process(ctx, []opencdc.Record{rec}) + is.Equal(len(got), 1) + is.Equal(opencdc.Record(got[0].(sdk.SingleRecord)), want) + } + + is.NoErr(p.Teardown(ctx)) + }(i + 1) + } + err = wg.WaitTimeout(ctx, time.Minute) + is.NoErr(err) + }) +} diff --git a/pkg/plugin/processor/standalone/standalone_test.go b/pkg/plugin/processor/standalone/standalone_test.go new file mode 100644 index 000000000..37cde10ef --- /dev/null +++ b/pkg/plugin/processor/standalone/standalone_test.go @@ -0,0 +1,147 @@ +// Copyright © 2023 
Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package standalone + +import ( + "context" + "fmt" + "os" + "os/exec" + "testing" + "time" + + "github.com/conduitio/conduit-commons/config" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/csync" + "github.com/stealthrocket/wazergo" + "github.com/tetratelabs/wazero" + "github.com/tetratelabs/wazero/imports/wasi_snapshot_preview1" +) + +const ( + testPluginDir = "./test/wasm_processors/" + + testPluginChaosDir = testPluginDir + "chaos/" + testPluginMalformedDir = testPluginDir + "malformed/" + testPluginSpecifyErrorDir = testPluginDir + "specify_error/" +) + +var ( + // TestRuntime can be reused in tests to avoid recompiling the test modules + TestRuntime wazero.Runtime + CompiledHostModule *wazergo.CompiledModule[*hostModuleInstance] + + ChaosProcessorBinary []byte + MalformedProcessorBinary []byte + SpecifyErrorBinary []byte + + ChaosProcessorModule wazero.CompiledModule + SpecifyErrorModule wazero.CompiledModule + + testProcessorPaths = map[string]tuple[*[]byte, *wazero.CompiledModule]{ + testPluginChaosDir + "processor.wasm": {&ChaosProcessorBinary, &ChaosProcessorModule}, + testPluginMalformedDir + "processor.txt": {&MalformedProcessorBinary, nil}, + testPluginSpecifyErrorDir + "processor.wasm": {&SpecifyErrorBinary, &SpecifyErrorModule}, + } +) + +func TestMain(m *testing.M) { + exitOnError := func(err error, msg string) { + if err != nil { 
+ _, _ = fmt.Fprintf(os.Stderr, "%v: %v", msg, err) + os.Exit(1) + } + } + + cmd := exec.Command("bash", "./test/build-test-processors.sh") + + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + + err := cmd.Run() + exitOnError(err, "error executing bash script") + + // instantiate shared test runtime + ctx := context.Background() + TestRuntime = wazero.NewRuntime(ctx) + + _, err = wasi_snapshot_preview1.Instantiate(ctx, TestRuntime) + exitOnError(err, "error instantiating WASI") + + CompiledHostModule, err = wazergo.Compile(ctx, TestRuntime, hostModule) + exitOnError(err, "error compiling host module") + + // load test processors + var wg csync.WaitGroup + for path, t := range testProcessorPaths { + *t.V1, err = os.ReadFile(path) + exitOnError(err, "error reading file "+path) + + if t.V2 == nil { + continue + } + + // compile modules in parallel + wg.Add(1) + go func(binary []byte, target *wazero.CompiledModule, path string) { + defer wg.Done() + var err error + *target, err = TestRuntime.CompileModule(ctx, binary) + exitOnError(err, "error compiling module "+path) + }(*t.V1, t.V2, path) + } + err = wg.WaitTimeout(ctx, time.Minute) + exitOnError(err, "timed out waiting on modules to compile") + + // run tests + code := m.Run() + + err = TestRuntime.Close(ctx) + exitOnError(err, "error closing wasm runtime") + + os.Exit(code) +} + +func ChaosProcessorSpecifications() sdk.Specification { + param := config.Parameter{ + Default: "success", + Type: config.ParameterTypeString, + Description: "prefix", + Validations: []config.Validation{ + config.ValidationInclusion{List: []string{"success", "error", "panic"}}, + }, + } + return sdk.Specification{ + Name: "chaos-processor", + Summary: "chaos processor summary", + Description: "chaos processor description", + Version: "v1.3.5", + Author: "Meroxa, Inc.", + Parameters: map[string]config.Parameter{ + "configure": param, + "open": param, + "process.prefix": { + Default: "", + Type: config.ParameterTypeString, + Description: 
"prefix to be added to the payload's after", + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + "process": param, + "teardown": param, + }, + } +} diff --git a/pkg/plugin/processor/standalone/test/build-test-processors.sh b/pkg/plugin/processor/standalone/test/build-test-processors.sh new file mode 100755 index 000000000..14c363590 --- /dev/null +++ b/pkg/plugin/processor/standalone/test/build-test-processors.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +WASM_PROCESSORS_DIR="$SCRIPT_DIR/wasm_processors" + +for dir in "$WASM_PROCESSORS_DIR"/*/; do + # Check if the directory contains a .go file + if [ -e "${dir}processor.go" ]; then + cd "$dir" || exit + + GOOS=wasip1 GOARCH=wasm go build -o processor.wasm processor.go + + cd "$WASM_PROCESSORS_DIR" || exit + fi +done diff --git a/pkg/plugin/processor/standalone/test/wasm_processors/chaos/processor.go b/pkg/plugin/processor/standalone/test/wasm_processors/chaos/processor.go new file mode 100644 index 000000000..fae071ff2 --- /dev/null +++ b/pkg/plugin/processor/standalone/test/wasm_processors/chaos/processor.go @@ -0,0 +1,122 @@ +// Copyright © 2023 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build wasm + +package main + +import ( + "context" + "errors" + + "github.com/conduitio/conduit-commons/config" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" +) + +func main() { + sdk.Run(&chaosProcessor{}) +} + +type chaosProcessor struct { + sdk.UnimplementedProcessor + cfg map[string]string +} + +func (p *chaosProcessor) Specification() (sdk.Specification, error) { + param := config.Parameter{ + Default: "success", + Type: config.ParameterTypeString, + Description: "prefix", + Validations: []config.Validation{ + config.ValidationInclusion{List: []string{"success", "error", "panic"}}, + }, + } + return sdk.Specification{ + Name: "chaos-processor", + Summary: "chaos processor summary", + Description: "chaos processor description", + Version: "v1.3.5", + Author: "Meroxa, Inc.", + Parameters: map[string]config.Parameter{ + "configure": param, + "open": param, + "process.prefix": { + Default: "", + Type: config.ParameterTypeString, + Description: "prefix to be added to the payload's after", + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + "process": param, + "teardown": param, + }, + }, nil +} + +func (p *chaosProcessor) Configure(_ context.Context, cfg map[string]string) error { + p.cfg = cfg + + err := p.methodBehavior("configure") + if err != nil { + return err + } + + return nil +} + +func (p *chaosProcessor) Open(context.Context) error { + return p.methodBehavior("open") +} + +func (p *chaosProcessor) methodBehavior(name string) error { + switch p.cfg[name] { + case "error": + return errors.New("boom") + case "panic": + panic(name + " panic") + case "", "success": + return nil + default: + panic("unknown mode: " + p.cfg[name]) + } +} + +func (p *chaosProcessor) Process(_ context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + err := p.methodBehavior("process") + if err != nil { + // on error we return a single record with the error + return 
[]sdk.ProcessedRecord{sdk.ErrorRecord{Error: err}} + } + + _, ok := p.cfg["process.prefix"] + if !ok { + return []sdk.ProcessedRecord{sdk.ErrorRecord{Error: errors.New("missing prefix")}} + } + + out := make([]sdk.ProcessedRecord, len(records)) + for i, record := range records { + original := record.Payload.After.(opencdc.RawData) + record.Payload.After = opencdc.RawData(p.cfg["process.prefix"] + string(original.Bytes())) + + out[i] = sdk.SingleRecord(record) + } + + return out +} + +func (p *chaosProcessor) Teardown(context.Context) error { + return p.methodBehavior("teardown") +} diff --git a/pkg/plugin/processor/standalone/test/wasm_processors/malformed/processor.txt b/pkg/plugin/processor/standalone/test/wasm_processors/malformed/processor.txt new file mode 100644 index 000000000..f13acb5c1 --- /dev/null +++ b/pkg/plugin/processor/standalone/test/wasm_processors/malformed/processor.txt @@ -0,0 +1 @@ +this is not a valid wasm binary \ No newline at end of file diff --git a/pkg/plugin/processor/standalone/test/wasm_processors/specify_error/processor.go b/pkg/plugin/processor/standalone/test/wasm_processors/specify_error/processor.go new file mode 100644 index 000000000..056873472 --- /dev/null +++ b/pkg/plugin/processor/standalone/test/wasm_processors/specify_error/processor.go @@ -0,0 +1,56 @@ +// Copyright © 2023 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +//go:build wasm + +package main + +import ( + "context" + "errors" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" +) + +func main() { + sdk.Run(&testProcessor{}) +} + +type testProcessor struct { + sdk.UnimplementedProcessor +} + +func (p *testProcessor) Specification() (sdk.Specification, error) { + return sdk.Specification{}, errors.New("boom") +} + +func (p *testProcessor) Configure(context.Context, map[string]string) error { + // TODO implement me + panic("implement me") +} + +func (p *testProcessor) Open(context.Context) error { + // TODO implement me + panic("implement me") +} + +func (p *testProcessor) Process(context.Context, []opencdc.Record) []sdk.ProcessedRecord { + // TODO implement me + panic("implement me") +} + +func (p *testProcessor) Teardown(context.Context) error { + return nil +} diff --git a/pkg/processor/builder.go b/pkg/processor/builder.go deleted file mode 100644 index 7c33b937a..000000000 --- a/pkg/processor/builder.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package processor - -import ( - "sync" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" -) - -// GlobalBuilderRegistry is a global registry of processor builders. It should -// be treated as a read only variable. 
-var GlobalBuilderRegistry = NewBuilderRegistry() - -// Builder parses the config and if valid returns a processor, an error otherwise. -type Builder func(Config) (Interface, error) - -// BuilderRegistry is a registry for registering or looking up processor -// builders. The Register and Get methods are safe for concurrent use. -type BuilderRegistry struct { - builders map[string]Builder - - lock sync.RWMutex -} - -// NewBuilderRegistry returns an empty *BuilderRegistry. -func NewBuilderRegistry() *BuilderRegistry { - return &BuilderRegistry{ - builders: make(map[string]Builder), - } -} - -// MustRegister tries to register a builder and panics on error. -func (r *BuilderRegistry) MustRegister(procType string, b Builder) { - err := r.Register(procType, b) - if err != nil { - panic(cerrors.Errorf("register processor builder failed: %w", err)) - } -} - -// Register registers a processor builder under the specified type. -// If a builder is already registered under that type it returns an error. -func (r *BuilderRegistry) Register(procType string, b Builder) error { - r.lock.Lock() - defer r.lock.Unlock() - - if _, ok := r.builders[procType]; ok { - return cerrors.Errorf("processor builder with type %q already registered", procType) - } - r.builders[procType] = b - - return nil -} - -// MustGet tries to get a builder and panics on error. -func (r *BuilderRegistry) MustGet(procType string) Builder { - b, err := r.Get(procType) - if err != nil { - panic(cerrors.Errorf("get processor builder failed: %w", err)) - } - return b -} - -// Get returns the processor builder registered under the specified type. -// If no builder is registered under that type it returns an error. 
-func (r *BuilderRegistry) Get(procType string) (Builder, error) { - r.lock.RLock() - defer r.lock.RUnlock() - - b, ok := r.builders[procType] - if !ok { - return nil, cerrors.Errorf("processor builder %q not found", procType) - } - - return b, nil -} diff --git a/pkg/processor/errors.go b/pkg/processor/errors.go index c3fb525c1..8eaff8e36 100644 --- a/pkg/processor/errors.go +++ b/pkg/processor/errors.go @@ -17,9 +17,6 @@ package processor import "github.com/conduitio/conduit/pkg/foundation/cerrors" var ( - // ErrSkipRecord is passed by a processor when it should Ack and skip a Record. - // It must be separate from a plain error so that we continue instead of marking - // the Pipeline status as degraded. - ErrSkipRecord = cerrors.New("record skipped") ErrInstanceNotFound = cerrors.New("processor instance not found") + ErrProcessorRunning = cerrors.New("processor already running") ) diff --git a/pkg/processor/instance.go b/pkg/processor/instance.go index 5daa8374f..cce408b2b 100644 --- a/pkg/processor/instance.go +++ b/pkg/processor/instance.go @@ -12,7 +12,6 @@ // See the License for the specific language governing permissions and // limitations under the License. -//go:generate mockgen -destination=mock/processor.go -package=mock -mock_names=Interface=Processor . Interface //go:generate stringer -type=ParentType -trimprefix ParentType package processor @@ -21,8 +20,8 @@ import ( "context" "time" + "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/inspector" - "github.com/conduitio/conduit/pkg/record" ) const ( @@ -42,37 +41,46 @@ type ( ProvisionType int ) -// Interface is the interface that represents a single message processor that -// can be executed on one record and manipulate it. -type Interface interface { - // Process runs the processor function on a record. - Process(ctx context.Context, record record.Record) (record.Record, error) - - // InspectIn starts an inspection session for input records for this processor. 
- InspectIn(ctx context.Context, id string) *inspector.Session - // InspectOut starts an inspection session for output records for this processor. - InspectOut(ctx context.Context, id string) *inspector.Session - - // Close closes this processor and releases any resources - // which may have been used by it. - Close() -} - -// Instance represents a processor instance. +// Instance represents a processor persisted in a database. +// An Instance is used to create a RunnableProcessor which represents +// a processor which can be used in a pipeline. type Instance struct { ID string CreatedAt time.Time UpdatedAt time.Time ProvisionedBy ProvisionType - Type string + Plugin string // Condition is a goTemplate formatted string, the value provided to the template is a sdk.Record, it should evaluate // to a boolean value, indicating a condition to run the processor for a specific record or not. (template functions // provided by `sprig` are injected) Condition string Parent Parent Config Config - Processor Interface + + // Needed because a user can start inspecting a processor + // before the processor is actually running. + inInsp *inspector.Inspector + outInsp *inspector.Inspector + running bool +} + +func (i *Instance) init(logger log.CtxLogger) { + i.inInsp = inspector.New(logger, inspector.DefaultBufferSize) + i.outInsp = inspector.New(logger, inspector.DefaultBufferSize) +} + +func (i *Instance) InspectIn(ctx context.Context, id string) *inspector.Session { + return i.inInsp.NewSession(ctx, id) +} + +func (i *Instance) InspectOut(ctx context.Context, id string) *inspector.Session { + return i.outInsp.NewSession(ctx, id) +} + +func (i *Instance) Close() { + i.inInsp.Close() + i.outInsp.Close() } // Parent represents the connection to the entity a processor is connected to. 
diff --git a/pkg/processor/mock/plugin_service.go b/pkg/processor/mock/plugin_service.go new file mode 100644 index 000000000..aacf4d551 --- /dev/null +++ b/pkg/processor/mock/plugin_service.go @@ -0,0 +1,56 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/conduitio/conduit/pkg/processor (interfaces: PluginService) +// +// Generated by this command: +// +// mockgen -destination=mock/plugin_service.go -package=mock -mock_names=PluginService=PluginService . PluginService +// + +// Package mock is a generated GoMock package. +package mock + +import ( + context "context" + reflect "reflect" + + sdk "github.com/conduitio/conduit-processor-sdk" + gomock "go.uber.org/mock/gomock" +) + +// PluginService is a mock of PluginService interface. +type PluginService struct { + ctrl *gomock.Controller + recorder *PluginServiceMockRecorder +} + +// PluginServiceMockRecorder is the mock recorder for PluginService. +type PluginServiceMockRecorder struct { + mock *PluginService +} + +// NewPluginService creates a new mock instance. +func NewPluginService(ctrl *gomock.Controller) *PluginService { + mock := &PluginService{ctrl: ctrl} + mock.recorder = &PluginServiceMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *PluginService) EXPECT() *PluginServiceMockRecorder { + return m.recorder +} + +// NewProcessor mocks base method. +func (m *PluginService) NewProcessor(arg0 context.Context, arg1, arg2 string) (sdk.Processor, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "NewProcessor", arg0, arg1, arg2) + ret0, _ := ret[0].(sdk.Processor) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// NewProcessor indicates an expected call of NewProcessor. 
+func (mr *PluginServiceMockRecorder) NewProcessor(arg0, arg1, arg2 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewProcessor", reflect.TypeOf((*PluginService)(nil).NewProcessor), arg0, arg1, arg2) +} diff --git a/pkg/processor/mock/processor.go b/pkg/processor/mock/processor.go deleted file mode 100644 index ceb1d1e45..000000000 --- a/pkg/processor/mock/processor.go +++ /dev/null @@ -1,97 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/conduitio/conduit/pkg/processor (interfaces: Interface) -// -// Generated by this command: -// -// mockgen -destination=mock/processor.go -package=mock -mock_names=Interface=Processor . Interface -// - -// Package mock is a generated GoMock package. -package mock - -import ( - context "context" - reflect "reflect" - - inspector "github.com/conduitio/conduit/pkg/inspector" - record "github.com/conduitio/conduit/pkg/record" - gomock "go.uber.org/mock/gomock" -) - -// Processor is a mock of Interface interface. -type Processor struct { - ctrl *gomock.Controller - recorder *ProcessorMockRecorder -} - -// ProcessorMockRecorder is the mock recorder for Processor. -type ProcessorMockRecorder struct { - mock *Processor -} - -// NewProcessor creates a new mock instance. -func NewProcessor(ctrl *gomock.Controller) *Processor { - mock := &Processor{ctrl: ctrl} - mock.recorder = &ProcessorMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *Processor) EXPECT() *ProcessorMockRecorder { - return m.recorder -} - -// Close mocks base method. -func (m *Processor) Close() { - m.ctrl.T.Helper() - m.ctrl.Call(m, "Close") -} - -// Close indicates an expected call of Close. -func (mr *ProcessorMockRecorder) Close() *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*Processor)(nil).Close)) -} - -// InspectIn mocks base method. 
-func (m *Processor) InspectIn(arg0 context.Context, arg1 string) *inspector.Session { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "InspectIn", arg0, arg1) - ret0, _ := ret[0].(*inspector.Session) - return ret0 -} - -// InspectIn indicates an expected call of InspectIn. -func (mr *ProcessorMockRecorder) InspectIn(arg0, arg1 any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InspectIn", reflect.TypeOf((*Processor)(nil).InspectIn), arg0, arg1) -} - -// InspectOut mocks base method. -func (m *Processor) InspectOut(arg0 context.Context, arg1 string) *inspector.Session { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "InspectOut", arg0, arg1) - ret0, _ := ret[0].(*inspector.Session) - return ret0 -} - -// InspectOut indicates an expected call of InspectOut. -func (mr *ProcessorMockRecorder) InspectOut(arg0, arg1 any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InspectOut", reflect.TypeOf((*Processor)(nil).InspectOut), arg0, arg1) -} - -// Process mocks base method. -func (m *Processor) Process(arg0 context.Context, arg1 record.Record) (record.Record, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "Process", arg0, arg1) - ret0, _ := ret[0].(record.Record) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// Process indicates an expected call of Process. -func (mr *ProcessorMockRecorder) Process(arg0, arg1 any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Process", reflect.TypeOf((*Processor)(nil).Process), arg0, arg1) -} diff --git a/pkg/processor/procbuiltin/decodewithschema.go b/pkg/processor/procbuiltin/decodewithschema.go deleted file mode 100644 index b5b07f932..000000000 --- a/pkg/processor/procbuiltin/decodewithschema.go +++ /dev/null @@ -1,227 +0,0 @@ -// Copyright © 2023 Meroxa, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "crypto/tls" - "crypto/x509" - "fmt" - "os" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/foundation/multierror" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/processor/schemaregistry" - "github.com/conduitio/conduit/pkg/record" - "github.com/lovromazgon/franz-go/pkg/sr" - "github.com/rs/zerolog" -) - -const ( - decodeWithSchemaKeyProcType = "decodewithschemakey" - decodeWithSchemaPayloadProcType = "decodewithschemapayload" - - schemaRegistryConfigURL = "url" - schemaRegistryConfigAuthBasicUsername = "auth.basic.username" - schemaRegistryConfigAuthBasicPassword = "auth.basic.password" //nolint:gosec // false positive, these are not credentials - schemaRegistryConfigTLSCACert = "tls.ca.cert" - schemaRegistryConfigTLSClientCert = "tls.client.cert" - schemaRegistryConfigTLSClientKey = "tls.client.key" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(decodeWithSchemaKeyProcType, DecodeWithSchemaKey) - processor.GlobalBuilderRegistry.MustRegister(decodeWithSchemaPayloadProcType, DecodeWithSchemaPayload) -} - -// DecodeWithSchemaKey builds a processor with the following config fields: -// - `url` (Required) - URL of the schema registry (e.g. 
http://localhost:8085) -// - `auth.basic.username` (Optional) - Configures the username to use with -// basic authentication. This option is required if `auth.basic.password` -// contains a value. If both `auth.basic.username` and `auth.basic.password` -// are empty basic authentication is disabled. -// - `auth.basic.password` (Optional) - Configures the password to use with -// basic authentication. This option is required if `auth.basic.username` -// contains a value. If both `auth.basic.username` and `auth.basic.password` -// are empty basic authentication is disabled. -// - `tls.ca.cert` (Optional) - Path to a file containing PEM encoded CA -// certificates. If this option is empty, Conduit falls back to using the -// host's root CA set. -// - `tls.client.cert` (Optional) - Path to a file containing a PEM encoded -// certificate. This option is required if `tls.client.key` contains a value. -// If both `tls.client.cert` and `tls.client.key` are empty TLS is disabled. -// - `tls.client.key` (Optional) - Path to a file containing a PEM encoded -// private key. This option is required if `tls.client.cert` contains a value. -// If both `tls.client.cert` and `tls.client.key` are empty TLS is disabled. -// -// The processor takes raw data (bytes) and decodes it from the Confluent wire -// format into structured data. It extracts the schema ID from the data, -// downloads the associated schema from the schema registry and decodes the -// payload. The schema is cached locally after it's first downloaded. Currently, -// the processor only supports the Avro format. If the processor encounters -// structured data or the data can't be decoded it returns an error. 
-// -// More info about the Confluent wire format: https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/index.html#wire-format -// More info about the Confluent schema registry: https://docs.confluent.io/platform/current/schema-registry/index.html -func DecodeWithSchemaKey(config processor.Config) (processor.Interface, error) { - return decodeWithSchema(decodeWithSchemaKeyProcType, recordKeyGetSetter{}, config) -} - -// DecodeWithSchemaPayload builds the same processor as DecodeWithSchemaKey, -// except that it operates on the field Record.Payload.After. -func DecodeWithSchemaPayload(config processor.Config) (processor.Interface, error) { - return decodeWithSchema(decodeWithSchemaPayloadProcType, recordPayloadGetSetter{}, config) -} - -type schemaRegistryConfig struct { - url string - - basicAuthUsername string - basicAuthPassword string - - tlsCACert *x509.CertPool - tlsClientCert *tls.Certificate -} - -func (c *schemaRegistryConfig) Parse(cfg processor.Config) error { - var err error - if c.url, err = getConfigFieldString(cfg, schemaRegistryConfigURL); err != nil { - return err - } - if err = c.parseBasicAuth(cfg); err != nil { - return err - } - return c.parseTLS(cfg) -} - -func (c *schemaRegistryConfig) ClientOptions() []sr.Opt { - clientOpts := []sr.Opt{sr.URLs(c.url), sr.Normalize()} - if c.basicAuthUsername != "" && c.basicAuthPassword != "" { - clientOpts = append(clientOpts, sr.BasicAuth(c.basicAuthUsername, c.basicAuthPassword)) - } - if c.tlsClientCert != nil { - tlsConfig := &tls.Config{ - Certificates: []tls.Certificate{*c.tlsClientCert}, - MinVersion: tls.VersionTLS12, - } - if c.tlsCACert != nil { - tlsConfig.RootCAs = c.tlsCACert - } - clientOpts = append(clientOpts, sr.DialTLSConfig(tlsConfig)) - } - return clientOpts -} - -func (c *schemaRegistryConfig) parseBasicAuth(cfg processor.Config) error { - username := cfg.Settings[schemaRegistryConfigAuthBasicUsername] - password := 
cfg.Settings[schemaRegistryConfigAuthBasicPassword] - - switch { - case username == "" && password == "": - // no basic auth set - return nil - case username == "": - return cerrors.Errorf("missing field %q: specify a username to enable basic auth or remove field %q", schemaRegistryConfigAuthBasicUsername, schemaRegistryConfigAuthBasicPassword) - case password == "": - return cerrors.Errorf("missing field %q: specify a password to enable basic auth or remove field %q", schemaRegistryConfigAuthBasicPassword, schemaRegistryConfigAuthBasicUsername) - } - c.basicAuthUsername = username - c.basicAuthPassword = password - return nil -} - -func (c *schemaRegistryConfig) parseTLS(cfg processor.Config) error { - clientCertPath := cfg.Settings[schemaRegistryConfigTLSClientCert] - clientKeyPath := cfg.Settings[schemaRegistryConfigTLSClientKey] - caCertPath := cfg.Settings[schemaRegistryConfigTLSCACert] - - if clientCertPath == "" && clientKeyPath == "" && caCertPath == "" { - // no tls config set - return nil - } else if clientCertPath == "" || clientKeyPath == "" { - // we are missing some configuration fields - err := cerrors.New("invalid TLS config") - if clientCertPath == "" { - err = multierror.Append(err, cerrors.Errorf("missing field %q", schemaRegistryConfigTLSClientCert)) - } - if clientKeyPath == "" { - err = multierror.Append(err, cerrors.Errorf("missing field %q", schemaRegistryConfigTLSClientKey)) - } - // CA cert is optional, we don't check if it's missing - return err - } - - clientCert, err := tls.LoadX509KeyPair(clientCertPath, clientKeyPath) - if err != nil { - return fmt.Errorf("failed to load client certificate: %w", err) - } - c.tlsClientCert = &clientCert - - if caCertPath != "" { - // load custom CA cert - caCert, err := os.ReadFile(caCertPath) - if err != nil { - return fmt.Errorf("failed to load CA certificate: %w", err) - } - caCertPool := x509.NewCertPool() - if ok := caCertPool.AppendCertsFromPEM(caCert); !ok { - return cerrors.New("invalid CA 
cert") - } - c.tlsCACert = caCertPool - } - - return nil -} - -func decodeWithSchema( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - var c schemaRegistryConfig - err := c.Parse(config) - if err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - - // TODO get logger from config or some other place - logger := log.InitLogger(zerolog.InfoLevel, log.FormatCLI) - - client, err := schemaregistry.NewClient(logger, c.ClientOptions()...) - if err != nil { - return nil, cerrors.Errorf("%s: could not create schema registry client: %w", processorType, err) - } - decoder := schemaregistry.NewDecoder(client, logger, &sr.Serde{}) - - return NewFuncWrapper(func(ctx context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - - switch d := data.(type) { - case record.RawData: - sd, err := decoder.Decode(ctx, d) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: %w:", processorType, err) - } - r = getSetter.Set(r, sd) - return r, nil - case record.StructuredData: - return record.Record{}, cerrors.Errorf("%s: structured data not supported", processorType) - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - }), nil -} diff --git a/pkg/processor/procbuiltin/encodewithschema.go b/pkg/processor/procbuiltin/encodewithschema.go deleted file mode 100644 index 01de3910f..000000000 --- a/pkg/processor/procbuiltin/encodewithschema.go +++ /dev/null @@ -1,239 +0,0 @@ -// Copyright © 2023 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/processor/schemaregistry" - "github.com/conduitio/conduit/pkg/record" - "github.com/lovromazgon/franz-go/pkg/sr" - "github.com/rs/zerolog" -) - -const ( - encodeWithSchemaKeyProcType = "encodewithschemakey" - encodeWithSchemaPayloadProcType = "encodewithschemapayload" - - encodeWithSchemaStrategy = "schema.strategy" - encodeWithSchemaPreRegisteredSubject = "schema.preRegistered.subject" - encodeWithSchemaPreRegisteredVersion = "schema.preRegistered.version" - encodeWithSchemaAutoRegisterSubject = "schema.autoRegister.subject" - encodeWithSchemaAutoRegisterFormat = "schema.autoRegister.format" - - encodeWithSchemaStrategyPreRegistered = "preRegistered" - encodeWithSchemaStrategyAutoRegister = "autoRegister" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(encodeWithSchemaKeyProcType, EncodeWithSchemaKey) - processor.GlobalBuilderRegistry.MustRegister(encodeWithSchemaPayloadProcType, EncodeWithSchemaPayload) -} - -// EncodeWithSchemaKey builds a processor with the following config fields: -// - `url` (Required) - URL of the schema registry (e.g. http://localhost:8085) -// - `schema.strategy` (Required, Enum: `preRegistered`,`autoRegister`) - Specifies -// which strategy to use to determine the schema for the record. 
Available -// strategies: -// - `preRegistered` (recommended) - Download an existing schema from the schema -// registry. This strategy is further configured with options starting -// with `schema.preRegistered.*`. -// - `autoRegister` (for development purposes) - Infer the schema from the -// record and register it in the schema registry. This strategy is further -// configured with options starting with `schema.autoRegister.*`. -// - `schema.preRegistered.subject` (Required if `schema.strategy` = `preRegistered`) - -// Specifies the subject of the schema in the schema registry used to encode -// the record. -// - `schema.preRegistered.version` (Required if `schema.strategy` = `preRegistered`) - -// Specifies the version of the schema in the schema registry used to encode -// the record. -// - `schema.autoRegister.subject` (Required if `schema.strategy` = `autoRegister`) - -// Specifies the subject name under which the inferred schema will be -// registered in the schema registry. -// - `schema.autoRegister.format` (Required if `schema.strategy` = `autoRegister`, Enum: `avro`) - -// Specifies the schema format that should be inferred. Currently the only -// supported format is `avro`. -// - `auth.basic.username` (Optional) - Configures the username to use with -// basic authentication. This option is required if `auth.basic.password` -// contains a value. If both `auth.basic.username` and `auth.basic.password` -// are empty basic authentication is disabled. -// - `auth.basic.password` (Optional) - Configures the password to use with -// basic authentication. This option is required if `auth.basic.username` -// contains a value. If both `auth.basic.username` and `auth.basic.password` -// are empty basic authentication is disabled. -// - `tls.ca.cert` (Optional) - Path to a file containing PEM encoded CA -// certificates. If this option is empty, Conduit falls back to using the -// host's root CA set. 
-// - `tls.client.cert` (Optional) - Path to a file containing a PEM encoded -// certificate. This option is required if `tls.client.key` contains a value. -// If both `tls.client.cert` and `tls.client.key` are empty TLS is disabled. -// - `tls.client.key` (Optional) - Path to a file containing a PEM encoded -// private key. This option is required if `tls.client.cert` contains a value. -// If both `tls.client.cert` and `tls.client.key` are empty TLS is disabled. -// -// The processor takes structured data and encodes it using a schema into the -// Confluent wire format. It provides two strategies for determining the -// schema: -// -// - `preRegistered` (recommended) -// -// This strategy downloads an existing schema from the schema registry and -// uses it to encode the record. This requires the schema to already be -// registered in the schema registry. The schema is downloaded only once and -// cached locally. -// -// - `autoRegister` (for development purposes) -// This strategy infers the schema by inspecting the structured data and -// registers it in the schema registry. If the record schema is known in -// advance it's recommended to use the `preRegistered` strategy and manually -// register the schema, as this strategy comes with limitations. -// -// The strategy uses reflection to traverse the structured data of each -// record and determine the type of each field. If a specific field is set -// to `nil` the processor won't have enough information to determine the -// type and will default to a nullable string. Because of this it is not -// guaranteed that two records with the same structure produce the same -// schema or even a backwards compatible schema. The processor registers -// each inferred schema in the schema registry with the same subject, -// therefore the schema compatibility checks need to be disabled for this -// schema to prevent failures. 
If the schema subject does not exist before -// running this processor, it will automatically set the correct -// compatibility settings the first time it registers the schema. -// -// The processor currently only supports the Avro format. -// -// More info about the Confluent wire format: https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/index.html#wire-format -// More info about the Confluent schema registry: https://docs.confluent.io/platform/current/schema-registry/index.html -func EncodeWithSchemaKey(config processor.Config) (processor.Interface, error) { - return encodeWithSchema(encodeWithSchemaKeyProcType, recordKeyGetSetter{}, config) -} - -// EncodeWithSchemaPayload builds the same processor as EncodeWithSchemaKey, -// except that it operates on the field Record.Payload.After. -func EncodeWithSchemaPayload(config processor.Config) (processor.Interface, error) { - return encodeWithSchema(encodeWithSchemaPayloadProcType, recordPayloadGetSetter{}, config) -} - -type encodeWithSchemaConfig struct { - schemaRegistryConfig - strategy schemaregistry.SchemaStrategy -} - -func (c *encodeWithSchemaConfig) Parse(cfg processor.Config) error { - if err := c.schemaRegistryConfig.Parse(cfg); err != nil { - return err - } - return c.parseSchemaStrategy(cfg) -} - -func (c *encodeWithSchemaConfig) parseSchemaStrategy(cfg processor.Config) error { - strategy, err := getConfigFieldString(cfg, encodeWithSchemaStrategy) - if err != nil { - return err - } - - switch strategy { - case encodeWithSchemaStrategyPreRegistered: - return c.parseSchemaStrategyPreRegistered(cfg) - case encodeWithSchemaStrategyAutoRegister: - return c.parseSchemaStrategyAutoRegister(cfg) - default: - return cerrors.Errorf("failed to parse %q: unknown schema strategy %q", encodeWithSchemaStrategy, strategy) - } -} - -func (c *encodeWithSchemaConfig) parseSchemaStrategyPreRegistered(cfg processor.Config) error { - subject, err := getConfigFieldString(cfg, 
encodeWithSchemaPreRegisteredSubject) - if err != nil { - return err - } - // TODO allow version to be set to "latest" - version, err := getConfigFieldInt64(cfg, encodeWithSchemaPreRegisteredVersion) - if err != nil { - return err - } - c.strategy = schemaregistry.DownloadSchemaStrategy{ - Subject: subject, - Version: int(version), - } - return nil -} - -func (c *encodeWithSchemaConfig) parseSchemaStrategyAutoRegister(cfg processor.Config) error { - subject, err := getConfigFieldString(cfg, encodeWithSchemaAutoRegisterSubject) - if err != nil { - return err - } - format, err := getConfigFieldString(cfg, encodeWithSchemaAutoRegisterFormat) - if err != nil { - return err - } - var schemaType sr.SchemaType - err = schemaType.UnmarshalText([]byte(format)) - if err != nil { - return cerrors.Errorf("failed to parse %q: %w", encodeWithSchemaAutoRegisterSubject, err) - } - c.strategy = schemaregistry.ExtractAndUploadSchemaStrategy{ - Type: schemaType, - Subject: subject, - } - return nil -} - -func (c *encodeWithSchemaConfig) SchemaStrategy() schemaregistry.SchemaStrategy { - return c.strategy -} - -func encodeWithSchema( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - var c encodeWithSchemaConfig - err := c.Parse(config) - if err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - - // TODO get logger from config or some other place - logger := log.InitLogger(zerolog.InfoLevel, log.FormatCLI) - - client, err := schemaregistry.NewClient(logger, c.ClientOptions()...) 
- if err != nil { - return nil, cerrors.Errorf("%s: could not create schema registry client: %w", processorType, err) - } - encoder := schemaregistry.NewEncoder(client, logger, &sr.Serde{}, c.SchemaStrategy()) - - return NewFuncWrapper(func(ctx context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - - switch d := data.(type) { - case record.RawData: - return record.Record{}, cerrors.Errorf("%s: raw data not supported (hint: if your records carry JSON data you can parse them into structured data with the processor `parsejsonpayload`)", processorType) - case record.StructuredData: - rd, err := encoder.Encode(ctx, d) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: %w:", processorType, err) - } - r = getSetter.Set(r, rd) - return r, nil - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - }), nil -} diff --git a/pkg/processor/procbuiltin/extractfield.go b/pkg/processor/procbuiltin/extractfield.go deleted file mode 100644 index 55a53d14b..000000000 --- a/pkg/processor/procbuiltin/extractfield.go +++ /dev/null @@ -1,98 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "context" - "fmt" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -const ( - extractFieldKeyProcType = "extractfieldkey" - extractFieldPayloadProcType = "extractfieldpayload" - - extractFieldConfigField = "field" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(extractFieldKeyProcType, ExtractFieldKey) - processor.GlobalBuilderRegistry.MustRegister(extractFieldPayloadProcType, ExtractFieldPayload) -} - -// ExtractFieldKey builds the following processor: -// - If the key is raw, return an error (not supported yet). -// - If the key is structured, extract the field and use it to replace the -// entire key. -func ExtractFieldKey(config processor.Config) (processor.Interface, error) { - return extractField(extractFieldKeyProcType, recordKeyGetSetter{}, config) -} - -// ExtractFieldPayload builds the same processor as ExtractFieldKey, except that -// it operates on the field Record.Payload.After. 
-func ExtractFieldPayload(config processor.Config) (processor.Interface, error) { - return extractField(extractFieldPayloadProcType, recordPayloadGetSetter{}, config) -} - -func extractField( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - var ( - err error - fieldName string - ) - - if fieldName, err = getConfigFieldString(config, extractFieldConfigField); err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - - return NewFuncWrapper(func(_ context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - - switch d := data.(type) { - case record.RawData: - if d.Schema == nil { - return record.Record{}, cerrors.Errorf("%s: schemaless raw data not supported", processorType) - } - return record.Record{}, cerrors.Errorf("%s: data with schema not supported yet", processorType) // TODO - case record.StructuredData: - // TODO add support for nested fields - extractedField := d[fieldName] - if extractedField == nil { - return record.Record{}, cerrors.Errorf("%s: field %q not found", processorType, fieldName) - } - - switch v := extractedField.(type) { - case map[string]interface{}: - data = record.StructuredData(v) - case []byte: - data = record.RawData{Raw: v} - default: - // marshal as string by default - data = record.RawData{Raw: []byte(fmt.Sprint(v))} - } - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - - r = getSetter.Set(r, data) - return r, nil - }), nil -} diff --git a/pkg/processor/procbuiltin/extractfield_test.go b/pkg/processor/procbuiltin/extractfield_test.go deleted file mode 100644 index 3038bbdf3..000000000 --- a/pkg/processor/procbuiltin/extractfield_test.go +++ /dev/null @@ -1,291 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "reflect" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/conduitio/conduit/pkg/record/schema/mock" - "github.com/matryer/is" -) - -func TestExtractFieldKey_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: ""}, - }}, - wantErr: true, - }, { - name: "non-empty field returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }}, - wantErr: false, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := ExtractFieldKey(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("ExtractFieldKey() error = %v, wantErr = %v", err, tt.wantErr) - return - } - }) - } -} - -func TestExtractFieldKey_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "structured data", - config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: 
"foo"}, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - }, - }}, - want: record.Record{ - Key: record.RawData{ - Raw: []byte("123"), - }, - }, - wantErr: false, - }, { - name: "structured data field not found", - config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "bar": 123, - "baz": []byte("123"), - }, - }}, - wantErr: true, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }}, - wantErr: true, // not supported - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := ExtractFieldKey(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("process() got = %v, want = %v", got, tt.want) - } - }) - } -} - -func TestExtractFieldPayload_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: 
processor.Config{ - Settings: map[string]string{extractFieldConfigField: ""}, - }}, - wantErr: true, - }, { - name: "non-empty field returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }}, - wantErr: false, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := ExtractFieldPayload(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("ExtractFieldPayload() error = %v, wantErr = %v", err, tt.wantErr) - return - } - }) - } -} - -func TestExtractFieldPayload_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "structured data", - config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("123"), - }, - }, - }, - wantErr: false, - }, { - name: "structured data field not found", - config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - "baz": []byte("123"), - }, - }, - }}, - wantErr: true, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }, - }}, - wantErr: true, // not supported - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{extractFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - 
Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := ExtractFieldPayload(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("process() got = %v, want = %v", got, tt.want) - } - }) - } -} diff --git a/pkg/processor/procbuiltin/filterfield.go b/pkg/processor/procbuiltin/filterfield.go deleted file mode 100644 index 9f4d0a8e6..000000000 --- a/pkg/processor/procbuiltin/filterfield.go +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "bytes" - "context" - - "github.com/antchfx/jsonquery" - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -const ( - // Types for the Filter in global builder registry - filterFieldKeyProcType = "filterfieldkey" - filterFieldPayloadProcType = "filterfieldpayload" - - // Config Fields for each processor - filterFieldConfigType = "type" - filterFieldConfigCondition = "condition" - filterFieldConfigMissingOrNull = "missingornull" - filterFieldConfigExists = "exists" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(filterFieldKeyProcType, FilterFieldKey) - processor.GlobalBuilderRegistry.MustRegister(filterFieldPayloadProcType, FilterFieldPayload) -} - -// FilterFieldKey builds a processor with the following config fields: -// - `type` sets the behavior to "include" or "exclude" the record based on the -// result of the condition. -// - `condition` is an XPath query expression that the user defines to forward -// or drop a record on its results. -// - `missingornull` defines how to handle the record in the event the fields -// the query would use don't exist. -// - `exists` field in the config gives the user a chance to define an existence -// query for a given filter. -// -// If `condition` passes, then it will immediately handle the record as -// `type` dictates. If `condition` doesn't match and `exists` matches nothing, -// then it will handle the record as `missingornull` specifies. 
-// -// Example processor config with noted possible values: -// -// { -// "type": "include", // [include, exclude] -// "condition":"", -// "exists": "", -// "missingornull": "fail" // [fail, include, exclude] -// } -func FilterFieldKey(config processor.Config) (processor.Interface, error) { - return filterField(filterFieldKeyProcType, recordKeyGetSetter{}, config) -} - -// FilterFieldPayload builds the same processor as FilterFieldKey, except that -// it operates on the field Record.Payload.After. -func FilterFieldPayload(config processor.Config) (processor.Interface, error) { - return filterField(filterFieldPayloadProcType, recordPayloadGetSetter{}, config) -} - -func filterField( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - if len(config.Settings) == 0 { - return nil, cerrors.New("must provide non-empty config") - } - var ( - filtertype string - filtercondition string - filternull string - filterexists string - ) - - // assign the values from our config - filtertype = config.Settings[filterFieldConfigType] - filtercondition = config.Settings[filterFieldConfigCondition] - filternull = config.Settings[filterFieldConfigMissingOrNull] - filterexists = config.Settings[filterFieldConfigExists] - - if filtertype == "" { - return nil, cerrors.New("must specify include or exclude filter type") - } - if filtercondition == "" { - return nil, cerrors.New("must specify filter condition") - } - // if filternull is not provided, filternull should fail loudly - if filternull == "" { - filternull = "fail" - } - - return NewFuncWrapper(func(_ context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - switch d := data.(type) { - case record.RawData: - if d.Schema == nil { - return record.Record{}, cerrors.Errorf("%s: schemaless raw data not supported", processorType) - } - return record.Record{}, cerrors.Errorf("%s: data with schema not supported yet", processorType) // TODO - 
case record.StructuredData: - doc, err := jsonquery.Parse(bytes.NewReader(d.Bytes())) - if err != nil { - return record.Record{}, cerrors.Errorf("filterfield failed to parse path: %w", err) - } - matches, err := jsonquery.Query(doc, filtercondition) - if err != nil { - return record.Record{}, cerrors.Errorf("invalid XPath expression in 'condition': %w", err) - } - - if matches == nil { - // check the filterexists query if one is set. - if filterexists != "" { - exists, err := jsonquery.QueryAll(doc, filterexists) - if err != nil { - return record.Record{}, cerrors.Errorf("invalid XPath expression in 'exists': %w", err) - } - // if it matches, handle normal drop record behavior. - if len(exists) == 0 { - // if it doesn't match, defer to filternull behavior - switch filternull { - case "include": - return r, nil - case "exclude": - return record.Record{}, processor.ErrSkipRecord - case "fail": - // fail should fail loudly with an existence error - return record.Record{}, cerrors.Errorf("field does not exist: %s", filterexists) - } - } - } - return record.Record{}, processor.ErrSkipRecord - } - - // filtercondition passed - // handle matches based on filtertype as normal - switch filtertype { - case "include": - return r, nil - case "exclude": - return record.Record{}, processor.ErrSkipRecord - default: - return record.Record{}, cerrors.Errorf("invalid filtertype: %s", filtertype) - } - - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - }), nil -} diff --git a/pkg/processor/procbuiltin/filterfield_test.go b/pkg/processor/procbuiltin/filterfield_test.go deleted file mode 100644 index 0fb7abccc..000000000 --- a/pkg/processor/procbuiltin/filterfield_test.go +++ /dev/null @@ -1,444 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "testing" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/google/go-cmp/cmp" - "github.com/matryer/is" -) - -func TestFilterFieldKey_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "nil config returns error", - args: args{ - config: processor.Config{}, - }, - wantErr: true, - }, - { - name: "empty config returns error", - args: args{ - config: processor.Config{ - Settings: map[string]string{}, - }, - }, - wantErr: true, - }, - { - name: "empty type returns error", - args: args{ - config: processor.Config{ - Settings: map[string]string{ - "type": "", - "condition": "$[key]", - }, - }, - }, - wantErr: true, - }, - { - name: "empty condition returns error", - args: args{ - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": "", - "fail": "include", - }, - }, - }, - wantErr: true, - }, - { - name: "valid config should return processor", - args: args{ - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": ".key", - "fail": "include", - }, - }, - }, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := FilterFieldKey(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("filterField() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - 
-func TestFilterFieldKey_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - args args - config processor.Config - want record.Record - wantErr bool - err error - }{ - { - name: "should return error on invalid condition", - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": "////", - "fail": "include", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "id": "foo", - }, - }}, - want: record.Record{}, - wantErr: true, - }, - { - name: "should forward record on condition", - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": ".id", - "fail": "include", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "id": "foo", - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "id": "foo", - }, - }, - wantErr: false, - }, - { - name: "should drop record on condition", - config: processor.Config{ - Settings: map[string]string{ - "type": "exclude", - "condition": ".id", - "fail": "include", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "id": "foo", - }, - }}, - want: record.Record{}, - wantErr: true, - err: processor.ErrSkipRecord, - }, - { - name: "should handle missing or null by failing", - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": "@id", - "missingornull": "fail", - "exists": "id", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "user": "foo", - }, - }}, - want: record.Record{}, - wantErr: true, - err: cerrors.New("field does not exist: id"), - }, - { - name: "should handle missing or null by including", - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": "@id", - "missingornull": "include", - "exists": "@id", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "user": "foo", - }, - }}, - want: record.Record{ - Key: 
record.StructuredData{ - "user": "foo", - }, - }, - wantErr: false, - }, - { - name: "should handle missing or null by excluding", - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": "@id", - "missingornull": "exclude", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "user": "foo", - }, - }}, - want: record.Record{}, - wantErr: true, - err: processor.ErrSkipRecord, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := FilterFieldKey(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("FilterFieldKey() error = %v, wantErr %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Logf("DIFF: %+s", diff) - t.Fail() - } - if tt.err != nil { - if diff := cmp.Diff(tt.err.Error(), err.Error()); diff != "" { - t.Errorf("DIFF: %s", diff) - } - } - }) - } -} - -func TestFilterFieldPayload_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{ - { - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, - { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, - { - name: "empty condition returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "missingornull": "fail", - "condition": "", - }, - }}, - wantErr: true, - }, - { - name: "empty type returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - "type": "", - "condition": "@id", - "missingornull": "fail", - }, - }}, - wantErr: true, - }, - { - name: "valid config returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": 
"@id", - }, - }}, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := FilterFieldPayload(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("FilterFieldPayload() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestFilterFieldPayload_Process(t *testing.T) { - type args struct { - config processor.Config - r record.Record - } - tests := []struct { - name string - args args - want record.Record - err error - }{ - { - name: "should return error on invalid condition", - args: args{ - r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": "bar", - }, - }, - }, - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": "////", - "missingornull": "fail", - }, - }, - }, - want: record.Record{}, - err: cerrors.New("invalid XPath expression in 'condition': expression must evaluate to a node-set"), - }, - { - name: "should forward record on condition", - args: args{ - r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": "bar", - }, - }, - }, - config: processor.Config{ - Settings: map[string]string{ - "type": "include", - "condition": "foo", - "missingornull": "fail", - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": "bar", - }, - }, - }, - }, - { - name: "should drop record on condition", - args: args{ - r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": "5", - }, - }, - }, - config: processor.Config{ - Settings: map[string]string{ - "type": "exclude", - "condition": "foo > 1", - }, - }}, - want: record.Record{}, - err: processor.ErrSkipRecord, - }, - { - name: "should drop record on missing key", - args: args{ - r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": "3", - }, - }, - }, - config: 
processor.Config{ - Settings: map[string]string{ - "type": "exclude", - "condition": "foo > 1", - "exists": "foo", - "missingornull": "exclude", - }, - }}, - want: record.Record{}, - err: processor.ErrSkipRecord, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := FilterFieldPayload(tt.args.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != (tt.err != nil) { - t.Errorf("FilterFieldPayload Error: %s - wanted: %s", err, tt.err) - return - } - if tt.err != nil { - if diff := cmp.Diff(tt.err.Error(), err.Error()); diff != "" { - t.Errorf("FilterFieldPayload() failed: [DIFF] %s", diff) - } - } - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Errorf("failed: %s", diff) - } - }) - } -} diff --git a/pkg/processor/procbuiltin/func_wrapper.go b/pkg/processor/procbuiltin/func_wrapper.go deleted file mode 100644 index 18a870ecd..000000000 --- a/pkg/processor/procbuiltin/func_wrapper.go +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright © 2023 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - - "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/inspector" - "github.com/conduitio/conduit/pkg/record" - "github.com/rs/zerolog" -) - -// FuncWrapper is an adapter allowing use of a function as an Interface. 
-type FuncWrapper struct { - f func(context.Context, record.Record) (record.Record, error) - inInsp *inspector.Inspector - outInsp *inspector.Inspector -} - -func NewFuncWrapper(f func(context.Context, record.Record) (record.Record, error)) FuncWrapper { - // TODO get logger from config or some other place - cw := zerolog.NewConsoleWriter() - cw.TimeFormat = "2006-01-02T15:04:05+00:00" - zl := zerolog.New(cw).With().Timestamp().Logger() - - return FuncWrapper{ - f: f, - inInsp: inspector.New(log.New(zl), inspector.DefaultBufferSize), - outInsp: inspector.New(log.New(zl), inspector.DefaultBufferSize), - } -} - -func (f FuncWrapper) Process(ctx context.Context, inRec record.Record) (record.Record, error) { - // todo same behavior as in procjs, probably can be enforced - f.inInsp.Send(ctx, inRec) - outRec, err := f.f(ctx, inRec) - if err != nil { - return record.Record{}, err - } - - f.outInsp.Send(ctx, outRec) - return outRec, nil -} - -func (f FuncWrapper) InspectIn(ctx context.Context, id string) *inspector.Session { - return f.inInsp.NewSession(ctx, id) -} - -func (f FuncWrapper) InspectOut(ctx context.Context, id string) *inspector.Session { - return f.outInsp.NewSession(ctx, id) -} - -func (f FuncWrapper) Close() { - f.inInsp.Close() - f.outInsp.Close() -} diff --git a/pkg/processor/procbuiltin/func_wrapper_test.go b/pkg/processor/procbuiltin/func_wrapper_test.go deleted file mode 100644 index b425e6609..000000000 --- a/pkg/processor/procbuiltin/func_wrapper_test.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright © 2023 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "testing" - "time" - - "github.com/conduitio/conduit/pkg/foundation/cchan" - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/record" - "github.com/matryer/is" -) - -func TestFuncWrapper_InspectIn(t *testing.T) { - ctx := context.Background() - wantIn := record.Record{ - Position: record.Position("position-in"), - Metadata: record.Metadata{"meta-key-in": "meta-value-in"}, - Key: record.RawData{Raw: []byte("key-in")}, - Payload: record.Change{After: record.RawData{Raw: []byte("payload-in")}}, - } - - testCases := []struct { - name string - err error - }{ - { - name: "processing ok", - err: nil, - }, - { - name: "processing failed", - err: cerrors.New("shouldn't happen"), - }, - } - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - is := is.New(t) - - underTest := NewFuncWrapper(func(_ context.Context, in record.Record) (record.Record, error) { - return record.Record{}, tc.err - }) - - session := underTest.InspectIn(ctx, "test-id") - _, _ = underTest.Process(ctx, wantIn) - - gotIn, got, err := cchan.ChanOut[record.Record](session.C).RecvTimeout(ctx, 100*time.Millisecond) - is.NoErr(err) - is.True(got) - is.Equal(wantIn, gotIn) - }) - } -} - -func TestFuncWrapper_InspectOut_Ok(t *testing.T) { - ctx := context.Background() - wantOut := record.Record{ - Position: record.Position("position-out"), - Metadata: record.Metadata{"meta-key-out": "meta-value-out"}, - Key: record.RawData{Raw: []byte("key-out")}, - Payload: 
record.Change{After: record.RawData{Raw: []byte("payload-out")}}, - } - - is := is.New(t) - - underTest := NewFuncWrapper(func(_ context.Context, in record.Record) (record.Record, error) { - return wantOut, nil - }) - - session := underTest.InspectOut(ctx, "test-id") - _, _ = underTest.Process(ctx, record.Record{}) - - gotOut, got, err := cchan.ChanOut[record.Record](session.C).RecvTimeout(ctx, 100*time.Millisecond) - is.NoErr(err) - is.True(got) - is.Equal(wantOut, gotOut) -} - -func TestFuncWrapper_InspectOut_ProcessingFailed(t *testing.T) { - ctx := context.Background() - wantOut := record.Record{ - Position: record.Position("position-out"), - Metadata: record.Metadata{"meta-key-out": "meta-value-out"}, - Key: record.RawData{Raw: []byte("key-out")}, - Payload: record.Change{After: record.RawData{Raw: []byte("payload-out")}}, - } - - is := is.New(t) - - underTest := NewFuncWrapper(func(_ context.Context, in record.Record) (record.Record, error) { - return wantOut, cerrors.New("shouldn't happen") - }) - - session := underTest.InspectOut(ctx, "test-id") - _, _ = underTest.Process(ctx, record.Record{}) - - _, _, err := cchan.ChanOut[record.Record](session.C).RecvTimeout(ctx, 100*time.Millisecond) - is.True(cerrors.Is(err, context.DeadlineExceeded)) -} - -func TestFuncWrapper_Close(t *testing.T) { - ctx := context.Background() - - is := is.New(t) - - underTest := NewFuncWrapper(func(_ context.Context, in record.Record) (record.Record, error) { - return record.Record{}, nil - }) - - in := underTest.InspectIn(ctx, "test-id") - out := underTest.InspectOut(ctx, "test-id") - underTest.Close() - - // incoming records session should be closed - _, got, err := cchan.ChanOut[record.Record](in.C).RecvTimeout(ctx, 100*time.Millisecond) - is.NoErr(err) - is.True(!got) - - // outgoing records session should be closed - _, got, err = cchan.ChanOut[record.Record](out.C).RecvTimeout(ctx, 100*time.Millisecond) - is.NoErr(err) - is.True(!got) -} diff --git 
a/pkg/processor/procbuiltin/hoistfield.go b/pkg/processor/procbuiltin/hoistfield.go deleted file mode 100644 index 5f8ec9900..000000000 --- a/pkg/processor/procbuiltin/hoistfield.go +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -const ( - hoistFieldKeyProcType = "hoistfieldkey" - hoistFieldPayloadProcType = "hoistfieldpayload" - - hoistFieldConfigField = "field" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(hoistFieldKeyProcType, HoistFieldKey) - processor.GlobalBuilderRegistry.MustRegister(hoistFieldPayloadProcType, HoistFieldPayload) -} - -// HoistFieldKey builds the following processor: -// - If the key is raw and has a schema attached, return an error (not supported yet). -// - If the key is raw and has no schema, transforms it into structured data by -// creating a map with the hoisted field and raw data as the value. -// - If the key is structured, wrap it using the specified field name in a map. 
-func HoistFieldKey(config processor.Config) (processor.Interface, error) { - return hoistField(hoistFieldKeyProcType, recordKeyGetSetter{}, config) -} - -// HoistFieldPayload builds the same processor as HoistFieldKey, except that -// it operates on the field Record.Payload.After. -func HoistFieldPayload(config processor.Config) (processor.Interface, error) { - return hoistField(hoistFieldPayloadProcType, recordPayloadGetSetter{}, config) -} - -func hoistField( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - var ( - err error - fieldName string - ) - - if fieldName, err = getConfigFieldString(config, hoistFieldConfigField); err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - - return NewFuncWrapper(func(_ context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - - switch d := data.(type) { - case record.RawData: - if d.Schema == nil { - data = record.StructuredData{ - fieldName: d.Raw, - } - } else { - return record.Record{}, cerrors.Errorf("%s: data with schema not supported yet", processorType) // TODO - } - case record.StructuredData: - data = record.StructuredData{ - fieldName: map[string]interface{}(d), - } - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - - r = getSetter.Set(r, data) - return r, nil - }), nil -} diff --git a/pkg/processor/procbuiltin/hoistfield_test.go b/pkg/processor/procbuiltin/hoistfield_test.go deleted file mode 100644 index ecf5afd73..000000000 --- a/pkg/processor/procbuiltin/hoistfield_test.go +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "reflect" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/conduitio/conduit/pkg/record/schema/mock" - "github.com/matryer/is" -) - -func TestHoistFieldKey_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: ""}, - }}, - wantErr: true, - }, { - name: "non-empty field returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: "foo"}, - }}, - wantErr: false, - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := HoistFieldKey(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("HoistFieldKey() error = %v, wantErr = %v", err, tt.wantErr) - return - } - }) - } -} - -func TestHoistFieldKey_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "structured data", - config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: "foo"}, - }, - 
args: args{r: record.Record{ - Key: record.StructuredData{ - "bar": 123, - "baz": nil, - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "foo": map[string]interface{}{ - "bar": 123, - "baz": nil, - }, - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "foo": []byte("raw data"), - }, - }, - wantErr: false, - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := HoistFieldKey(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("process() got = %v, want = %v", got, tt.want) - } - }) - } -} - -func TestHoistFieldPayload_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: ""}, - }}, - wantErr: true, - }, { - name: "non-empty field returns processor", - args: 
args{config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: "foo"}, - }}, - wantErr: false, - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := HoistFieldPayload(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("HoistFieldPayload() error = %v, wantErr = %v", err, tt.wantErr) - return - } - }) - } -} - -func TestHoistFieldPayload_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "structured data", - config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - "baz": nil, - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": map[string]interface{}{ - "bar": 123, - "baz": nil, - }, - }, - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": []byte("raw data"), - }, - }, - }, - wantErr: false, - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{hoistFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := 
HoistFieldPayload(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("process() got = %v, want = %v", got, tt.want) - } - }) - } -} diff --git a/pkg/processor/procbuiltin/httprequest.go b/pkg/processor/procbuiltin/httprequest.go deleted file mode 100644 index e3be2e91c..000000000 --- a/pkg/processor/procbuiltin/httprequest.go +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "bytes" - "context" - "io" - "net/http" - "net/url" - "time" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/goccy/go-json" - "github.com/jpillora/backoff" -) - -const ( - httpRequestProcType = "httprequest" - - httpRequestConfigURL = "url" - httpRequestConfigMethod = "method" - httpRequestConfigContentType = "contentType" - httpRequestContentTypeDefault = "application/json" - httpRequestBackoffRetryCount = "backoffRetry.count" - httpRequestBackoffRetryMin = "backoffRetry.min" - httpRequestBackoffRetryMax = "backoffRetry.max" - httpRequestBackoffRetryFactor = "backoffRetry.factor" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(httpRequestProcType, HTTPRequest) -} - -// HTTPRequest builds a processor that sends an HTTP request to the specified URL with the specified HTTP method -// (default is POST) with a content-type header as the specified value (default is application/json). the whole -// record as json will be used as the request body and the raw response body will be set under Record.Payload.After. -// if the response code is (204 No Content) then the record will be filtered out. 
-func HTTPRequest(config processor.Config) (processor.Interface, error) { - return httpRequest(httpRequestProcType, config) -} - -func httpRequest( - processorType string, - config processor.Config, -) (processor.Interface, error) { - var ( - err error - rawURL string - method string - ) - - if rawURL, err = getConfigFieldString(config, httpRequestConfigURL); err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - - _, err = url.Parse(rawURL) - if err != nil { - return nil, cerrors.Errorf("%s: error trying to parse url: %w", processorType, err) - } - - method = config.Settings[httpRequestConfigMethod] - if method == "" { - method = http.MethodPost - } - contentType := config.Settings[httpRequestConfigContentType] - if contentType == "" { - contentType = httpRequestContentTypeDefault - } - - // preflight check - _, err = http.NewRequest( - method, - rawURL, - bytes.NewReader([]byte{}), - ) - if err != nil { - return nil, cerrors.Errorf("%s: error trying to create HTTP request: %w", processorType, err) - } - - procFn := func(ctx context.Context, r record.Record) (record.Record, error) { - jsonRec, err := json.Marshal(r) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: error creating json record: %w", processorType, err) - } - - req, err := http.NewRequestWithContext( - ctx, - method, - rawURL, - bytes.NewReader(jsonRec), - ) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: error trying to create HTTP request: %w", processorType, err) - } - - req.Header.Set("Content-Type", contentType) - - resp, err := http.DefaultClient.Do(req) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: error trying to execute HTTP request: %w", processorType, err) - } - defer resp.Body.Close() - - body, err := io.ReadAll(resp.Body) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: error trying to read response body: %w", processorType, err) - } - - if resp.StatusCode > 299 { - // regard status codes over 299 as 
errors - return record.Record{}, cerrors.Errorf("%s: invalid status code %v (body: %q)", processorType, resp.StatusCode, string(body)) - } - // skip if body has no content - if resp.StatusCode == http.StatusNoContent { - return record.Record{}, processor.ErrSkipRecord - } - - r.Payload.After = record.RawData{Raw: body} - return r, nil - } - - return configureHTTPRequestBackoffRetry(processorType, config, procFn) -} - -func configureHTTPRequestBackoffRetry( - processorType string, - config processor.Config, - procFn func(context.Context, record.Record) (record.Record, error), -) (processor.Interface, error) { - // retryCount is a float64 to match the backoff library attempt type - var retryCount float64 - - tmp, err := getConfigFieldInt64(config, httpRequestBackoffRetryCount) - if err != nil && !cerrors.Is(err, errEmptyConfigField) { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - retryCount = float64(tmp) - - if retryCount == 0 { - // no retries configured, just use the plain processor - return NewFuncWrapper(procFn), nil - } - - // default retry values - b := &backoff.Backoff{ - Factor: 2, - Min: time.Millisecond * 100, - Max: time.Second * 5, - } - - min, err := getConfigFieldDuration(config, httpRequestBackoffRetryMin) - if err != nil && !cerrors.Is(err, errEmptyConfigField) { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } else if err == nil { - b.Min = min - } - - max, err := getConfigFieldDuration(config, httpRequestBackoffRetryMax) - if err != nil && !cerrors.Is(err, errEmptyConfigField) { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } else if err == nil { - b.Max = max - } - - factor, err := getConfigFieldFloat64(config, httpRequestBackoffRetryFactor) - if err != nil && !cerrors.Is(err, errEmptyConfigField) { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } else if err == nil { - b.Factor = factor - } - - // wrap processor in a retry loop - return NewFuncWrapper(func(ctx context.Context, r 
record.Record) (record.Record, error) { - for { - r, err := procFn(ctx, r) - if err != nil && b.Attempt() < retryCount { - // TODO log message that we are retrying, include error cause (we don't have access to a proper logger) - time.Sleep(b.Duration()) - continue - } - b.Reset() // reset for next processor execution - return r, err - } - }), nil -} diff --git a/pkg/processor/procbuiltin/httprequest_test.go b/pkg/processor/procbuiltin/httprequest_test.go deleted file mode 100644 index fecc8e777..000000000 --- a/pkg/processor/procbuiltin/httprequest_test.go +++ /dev/null @@ -1,355 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "context" - "io" - "net/http" - "net/http/httptest" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/goccy/go-json" - "github.com/matryer/is" -) - -func TestHTTPRequest_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty url returns error", - args: args{config: processor.Config{ - Settings: map[string]string{httpRequestConfigURL: ""}, - }}, - wantErr: true, - }, { - name: "invalid url returns error", - args: args{config: processor.Config{ - Settings: map[string]string{httpRequestConfigURL: ":not/a/valid/url"}, - }}, - wantErr: true, - }, { - name: "invalid method returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: "http://example.com", - httpRequestConfigMethod: ":foo", - }, - }}, - wantErr: true, - }, { - name: "invalid backoffRetry.count returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: "http://example.com", - httpRequestBackoffRetryCount: "not-a-number", - }, - }}, - wantErr: true, - }, { - name: "invalid backoffRetry.min returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: "http://example.com", - httpRequestBackoffRetryCount: "1", - httpRequestBackoffRetryMin: "not-a-duration", - }, - }}, - wantErr: true, - }, { - name: "invalid backoffRetry.max returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: "http://example.com", - httpRequestBackoffRetryCount: "1", - httpRequestBackoffRetryMax: 
"not-a-duration", - }, - }}, - wantErr: true, - }, { - name: "invalid backoffRetry.factor returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: "http://example.com", - httpRequestBackoffRetryCount: "1", - httpRequestBackoffRetryFactor: "not-a-number", - }, - }}, - wantErr: true, - }, { - name: "valid url returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{httpRequestConfigURL: "http://example.com"}, - }}, - wantErr: false, - }, { - name: "valid url and method returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: "http://example.com", - httpRequestConfigMethod: "GET", - }, - }}, - wantErr: false, - }, { - name: "invalid backoff retry config is ignored", - args: args{config: processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: "http://example.com", - httpRequestBackoffRetryMin: "not-a-duration", - httpRequestBackoffRetryMax: "not-a-duration", - httpRequestBackoffRetryFactor: "not-a-number", - }, - }}, - wantErr: false, - }, { - name: "valid url, method and backoff retry config returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: "http://example.com", - httpRequestBackoffRetryCount: "1", - httpRequestBackoffRetryMin: "10ms", - httpRequestBackoffRetryMax: "1s", - httpRequestBackoffRetryFactor: "1.3", - httpRequestConfigContentType: "application/json", - }, - }}, - wantErr: false, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := HTTPRequest(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("HTTPRequest() error = %v, wantErr = %v", err, tt.wantErr) - } - }) - } -} - -func TestHTTPRequest_Success(t *testing.T) { - respBody := []byte("foo-bar/response") - - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - }{{ - 
name: "structured data", - config: processor.Config{ - Settings: map[string]string{httpRequestConfigMethod: "GET"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - "baz": nil, - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{Raw: respBody}, - }, - }, - }, { - name: "raw data", - config: processor.Config{ - Settings: map[string]string{}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{Raw: []byte("random data")}, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{Raw: respBody}, - }, - }, - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - wantMethod := tt.config.Settings[httpRequestConfigMethod] - if wantMethod == "" { - wantMethod = "POST" // default - } - - wantBody, err := json.Marshal(tt.args.r) - is.NoErr(err) - - srv := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { - is.Equal(wantMethod, req.Method) - - gotBody, err := io.ReadAll(req.Body) - is.NoErr(err) - is.Equal(wantBody, gotBody) - - _, err = resp.Write(respBody) - is.NoErr(err) - })) - defer srv.Close() - - tt.config.Settings[httpRequestConfigURL] = srv.URL - underTest, err := HTTPRequest(tt.config) - is.NoErr(err) - - got, err := underTest.Process(context.Background(), tt.args.r) - is.NoErr(err) - is.Equal(got.Payload.After, record.RawData{Raw: respBody}) - }) - } -} - -func TestHTTPRequest_RetrySuccess(t *testing.T) { - is := is.New(t) - - respBody := []byte("foo-bar/response") - - wantMethod := "POST" - rec := record.Record{Payload: record.Change{After: record.RawData{Raw: []byte("random data")}}} - wantBody, err := json.Marshal(rec) - is.NoErr(err) - - srvHandlerCount := 0 - - srv := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { - 
srvHandlerCount++ - - is.Equal(wantMethod, req.Method) - - gotBody, err := io.ReadAll(req.Body) - is.NoErr(err) - is.Equal(wantBody, gotBody) - - if srvHandlerCount < 5 { - // first 4 requests will fail with an internal server error - resp.WriteHeader(http.StatusInternalServerError) - } else { - _, err := resp.Write(respBody) - is.NoErr(err) - } - })) - defer srv.Close() - - config := processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: srv.URL, - httpRequestBackoffRetryCount: "4", - httpRequestBackoffRetryMin: "5ms", - httpRequestBackoffRetryMax: "10ms", - httpRequestBackoffRetryFactor: "1.2", - }, - } - - underTest, err := HTTPRequest(config) - is.NoErr(err) - - got, err := underTest.Process(context.Background(), rec) - is.NoErr(err) - is.Equal(got.Payload.After, record.RawData{Raw: respBody}) - is.Equal(srvHandlerCount, 5) -} - -func TestHTTPRequest_RetryFail(t *testing.T) { - is := is.New(t) - - srvHandlerCount := 0 - - srv := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { - srvHandlerCount++ - // all requests fail - resp.WriteHeader(http.StatusInternalServerError) - })) - defer srv.Close() - - config := processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: srv.URL, - httpRequestBackoffRetryCount: "5", - httpRequestBackoffRetryMin: "5ms", - httpRequestBackoffRetryMax: "10ms", - httpRequestBackoffRetryFactor: "1.2", - }, - } - - underTest, err := HTTPRequest(config) - is.NoErr(err) - - got, err := underTest.Process(context.Background(), record.Record{Payload: record.Change{After: record.RawData{}}}) - is.True(err != nil) // expected an error - is.Equal(got, record.Record{}) - is.Equal(srvHandlerCount, 6) // expected 6 requests (1 regular and 5 retries) -} - -func TestHTTPRequest_FilterRecord(t *testing.T) { - is := is.New(t) - - wantMethod := "POST" - rec := record.Record{Payload: record.Change{After: record.RawData{Raw: []byte("random data")}}} - wantBody, err := json.Marshal(rec) - 
is.NoErr(err) - - srv := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) { - is.Equal(wantMethod, req.Method) - - gotBody, err := io.ReadAll(req.Body) - is.NoErr(err) - is.Equal(wantBody, gotBody) - - resp.WriteHeader(http.StatusNoContent) - })) - defer srv.Close() - - config := processor.Config{ - Settings: map[string]string{ - httpRequestConfigURL: srv.URL, - }, - } - - underTest, err := HTTPRequest(config) - is.NoErr(err) - - got, err := underTest.Process(context.Background(), rec) - is.Equal(err, processor.ErrSkipRecord) - is.Equal(got, record.Record{}) -} diff --git a/pkg/processor/procbuiltin/insertfield.go b/pkg/processor/procbuiltin/insertfield.go deleted file mode 100644 index bb8f21584..000000000 --- a/pkg/processor/procbuiltin/insertfield.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "context" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -const ( - insertFieldKeyProcType = "insertfieldkey" - insertFieldPayloadProcType = "insertfieldpayload" - - insertFieldConfigStaticField = "static.field" - insertFieldConfigStaticValue = "static.value" - insertFieldConfigPositionField = "position.field" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(insertFieldKeyProcType, InsertFieldKey) - processor.GlobalBuilderRegistry.MustRegister(insertFieldPayloadProcType, InsertFieldPayload) -} - -// InsertFieldKey builds the following processor: -// - If the key is raw, return an error (not supported yet). -// - If the key is structured, set the field(s) in the key data. -func InsertFieldKey(config processor.Config) (processor.Interface, error) { - return insertField(insertFieldKeyProcType, recordKeyGetSetter{}, config) -} - -// InsertFieldPayload builds the same processor as InsertFieldKey, except that -// it operates on the field Record.Payload.After. 
-func InsertFieldPayload(config processor.Config) (processor.Interface, error) { - return insertField(insertFieldPayloadProcType, recordPayloadGetSetter{}, config) -} - -func insertField( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - var ( - err error - - staticFieldName string - staticFieldValue string - positionField string - ) - - positionField = config.Settings[insertFieldConfigPositionField] - staticFieldName, ok := config.Settings[insertFieldConfigStaticField] - if ok { - if staticFieldValue, err = getConfigFieldString(config, insertFieldConfigStaticValue); err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - } - if staticFieldName == "" && positionField == "" { - return nil, cerrors.Errorf("%s: no fields configured to be inserted", processorType) - } - - return NewFuncWrapper(func(_ context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - - switch d := data.(type) { - case record.RawData: - if d.Schema == nil { - return record.Record{}, cerrors.Errorf("%s: schemaless raw data not supported", processorType) - } - return record.Record{}, cerrors.Errorf("%s: data with schema not supported yet", processorType) // TODO - case record.StructuredData: - // TODO add support for nested fields - if staticFieldName != "" { - d[staticFieldName] = staticFieldValue - } - if positionField != "" { - d[positionField] = r.Position - } - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - - r = getSetter.Set(r, data) - return r, nil - }), nil -} diff --git a/pkg/processor/procbuiltin/insertfield_test.go b/pkg/processor/procbuiltin/insertfield_test.go deleted file mode 100644 index 054c02e0d..000000000 --- a/pkg/processor/procbuiltin/insertfield_test.go +++ /dev/null @@ -1,521 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "reflect" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/conduitio/conduit/pkg/record/schema/mock" - "github.com/matryer/is" -) - -func TestInsertFieldKey_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "static field without static value returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "", - }, - }}, - wantErr: true, - }, { - name: "static field with empty static value returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "", - }, - }}, - wantErr: true, - }, { - name: "static field with static value returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "bar", - }, - }}, - wantErr: false, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := 
InsertFieldKey(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("InsertFieldKey() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestInsertFieldKey_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "static field in structured data", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "bar", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "bar": 123, - "baz": nil, - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "bar": 123, - "baz": nil, - "foo": "bar", - }, - }, - wantErr: false, - }, { - name: "static field in raw data without schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "bar", - }, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }}, - wantErr: true, // not supported - }, { - name: "static field in raw data with schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "bar", - }, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }, { - name: "position in structured data", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigPositionField: "foo", - }, - }, - args: args{r: record.Record{ - Position: record.Position("3"), - Key: record.StructuredData{ - "bar": 123, - "baz": nil, - }, - }}, - want: record.Record{ - Position: record.Position("3"), - Key: record.StructuredData{ - "bar": 123, - "baz": nil, - "foo": record.Position("3"), - }, - }, - wantErr: false, - }, { - 
name: "position in raw data without schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigPositionField: "foo", - }, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }}, - wantErr: true, // not supported - }, { - name: "position in raw data with schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigPositionField: "foo", - }, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }, { - name: "all fields in structured data", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "fooStatic", - insertFieldConfigStaticValue: "bar", - insertFieldConfigPositionField: "fooPosition", - }, - }, - args: args{r: record.Record{ - Position: record.Position("321"), - Key: record.StructuredData{ - "bar": 123, - "baz": nil, - }, - }}, - want: record.Record{ - Position: record.Position("321"), - Key: record.StructuredData{ - "bar": 123, - "baz": nil, - "fooStatic": "bar", - "fooPosition": record.Position("321"), - }, - }, - wantErr: false, - }, { - name: "all fields in raw data with schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "fooStatic", - insertFieldConfigStaticValue: "bar", - insertFieldConfigPositionField: "fooPosition", - }, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := InsertFieldKey(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, 
tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("process() got = %v, want = %v", got, tt.want) - } - }) - } -} - -func TestInsertFieldPayload_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "static field without static value returns error", - args: args{config: processor.Config{ - Settings: map[string]string{insertFieldConfigStaticField: ""}, - }}, - wantErr: true, - }, { - name: "static field with empty static value returns error", - args: args{config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "", - }, - }}, - wantErr: true, - }, { - name: "static field with static value returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "bar", - }, - }}, - wantErr: false, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := InsertFieldPayload(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("InsertFieldPayload() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestInsertFieldPayload_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "static field in structured data", - config: processor.Config{ - Settings: map[string]string{insertFieldConfigStaticField: "foo", insertFieldConfigStaticValue: "bar"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - 
"baz": nil, - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - "baz": nil, - "foo": "bar", - }, - }, - }, - wantErr: false, - }, { - name: "static field in raw data without schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "bar", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }, - }}, - wantErr: true, // not supported - }, { - name: "static field in raw data with schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "foo", - insertFieldConfigStaticValue: "bar", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }, { - name: "position in structured data", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigPositionField: "foo", - }, - }, - args: args{r: record.Record{ - Position: record.Position("3"), - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - "baz": nil, - }, - }, - }}, - want: record.Record{ - Position: record.Position("3"), - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - "baz": nil, - "foo": record.Position("3"), - }, - }, - }, - wantErr: false, - }, { - name: "position in raw data without schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigPositionField: "foo", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }, - }}, - wantErr: true, // not supported - }, { - name: "position in raw data with schema", - 
config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigPositionField: "foo", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }, { - name: "all fields in structured data", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "fooStatic", - insertFieldConfigStaticValue: "bar", - insertFieldConfigPositionField: "fooPosition", - }, - }, - args: args{r: record.Record{ - Position: record.Position("321"), - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - "baz": nil, - }, - }, - }}, - want: record.Record{ - Position: record.Position("321"), - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 123, - "baz": nil, - "fooStatic": "bar", - "fooPosition": record.Position("321"), - }, - }, - }, - wantErr: false, - }, { - name: "all fields in raw data with schema", - config: processor.Config{ - Settings: map[string]string{ - insertFieldConfigStaticField: "fooStatic", - insertFieldConfigStaticValue: "bar", - insertFieldConfigPositionField: "fooPosition", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := InsertFieldPayload(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("process() got = %v, want = %v", got, tt.want) - } - }) - } -} diff --git 
a/pkg/processor/procbuiltin/maskfield.go b/pkg/processor/procbuiltin/maskfield.go deleted file mode 100644 index 535d92320..000000000 --- a/pkg/processor/procbuiltin/maskfield.go +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "reflect" - "strconv" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -const ( - maskFieldKeyProcType = "maskfieldkey" - maskFieldPayloadProcType = "maskfieldpayload" - - maskFieldConfigField = "field" - maskFieldConfigReplacement = "replacement" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(maskFieldKeyProcType, MaskFieldKey) - processor.GlobalBuilderRegistry.MustRegister(maskFieldPayloadProcType, MaskFieldPayload) -} - -// MaskFieldKey builds the following processor: -// - If the key is raw, return an error (not supported yet). -// - If the key is structured, replace the field with the zero value of the -// fields type. -func MaskFieldKey(config processor.Config) (processor.Interface, error) { - return maskField(maskFieldKeyProcType, recordKeyGetSetter{}, config) -} - -// MaskFieldPayload builds the same processor as MaskFieldKey, except that -// it operates on the field Record.Payload.After. 
-func MaskFieldPayload(config processor.Config) (processor.Interface, error) { - return maskField(maskFieldPayloadProcType, recordPayloadGetSetter{}, config) -} - -func maskField( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - var ( - err error - fieldName string - replacement string - ) - - if fieldName, err = getConfigFieldString(config, maskFieldConfigField); err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - replacement = config.Settings[maskFieldConfigReplacement] - - return NewFuncWrapper(func(_ context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - - switch d := data.(type) { - case record.RawData: - if d.Schema == nil { - return record.Record{}, cerrors.Errorf("%s: schemaless raw data not supported", processorType) - } - return record.Record{}, cerrors.Errorf("%s: data with schema not supported yet", processorType) // TODO - case record.StructuredData: - // TODO add support for nested fields - switch d[fieldName].(type) { - case string: - d[fieldName] = replacement - case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64: // any numeric type - // ignore error, i is going to be zero if it fails anyway - i, _ := strconv.Atoi(replacement) - d[fieldName] = i - default: - fieldType := reflect.TypeOf(d[fieldName]) - zeroValue := reflect.New(fieldType).Elem().Interface() - d[fieldName] = zeroValue - } - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - - r = getSetter.Set(r, data) - return r, nil - }), nil -} diff --git a/pkg/processor/procbuiltin/maskfield_test.go b/pkg/processor/procbuiltin/maskfield_test.go deleted file mode 100644 index a4043f3a6..000000000 --- a/pkg/processor/procbuiltin/maskfield_test.go +++ /dev/null @@ -1,350 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/conduitio/conduit/pkg/record/schema/mock" - "github.com/google/go-cmp/cmp" - "github.com/matryer/is" -) - -func TestMaskFieldKey_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: ""}, - }}, - wantErr: true, - }, { - name: "non-empty field returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }}, - wantErr: false, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := MaskFieldKey(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("MaskFieldKey() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestMaskFieldKey_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want 
record.Record - wantErr bool - }{{ - name: "structured data int", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "baz": nil, - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "foo": 0, - "baz": nil, - }, - }, - wantErr: false, - }, { - name: "structured data string", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": "sensitive data", - "baz": nil, - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "foo": "", - "baz": nil, - }, - }, - wantErr: false, - }, { - name: "structured data map", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": map[string]interface{}{"bar": "buz"}, - "baz": nil, - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "foo": map[string]interface{}(nil), - "baz": nil, - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }}, - wantErr: true, // not supported - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := MaskFieldKey(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, 
wantErr = %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} - -func TestMaskFieldPayload_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: ""}, - }}, - wantErr: true, - }, { - name: "non-empty field returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }}, - wantErr: false, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := MaskFieldPayload(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("MaskFieldPayload() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestMaskFieldPayload_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "structured data int", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "baz": nil, - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 0, - "baz": nil, - }, - }, - }, - wantErr: false, - }, { - name: "structured data string", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: 
record.StructuredData{ - "foo": "sensitive data", - "baz": nil, - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": "", - "baz": nil, - }, - }, - }, - wantErr: false, - }, { - name: "structured data map", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": map[string]interface{}{"bar": "buz"}, - "baz": nil, - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": map[string]interface{}(nil), - "baz": nil, - }, - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }, - }}, - wantErr: true, // not supported - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{maskFieldConfigField: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := MaskFieldPayload(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} diff --git a/pkg/processor/procbuiltin/parsejson.go b/pkg/processor/procbuiltin/parsejson.go deleted file mode 100644 index 
5c92e4350..000000000 --- a/pkg/processor/procbuiltin/parsejson.go +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright © 2023 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/goccy/go-json" -) - -const ( - parseJSONKeyProcType = "parsejsonkey" - parseJSONPayloadProcType = "parsejsonpayload" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(parseJSONKeyProcType, ParseJSONKey) - processor.GlobalBuilderRegistry.MustRegister(parseJSONPayloadProcType, ParseJSONPayload) -} - -// ParseJSONKey parses the record key from raw to structured data -func ParseJSONKey(_ processor.Config) (processor.Interface, error) { - return parseJSON(parseJSONKeyProcType, recordKeyGetSetter{}) -} - -// ParseJSONPayload parses the record payload from raw to structured data -func ParseJSONPayload(_ processor.Config) (processor.Interface, error) { - return parseJSON(parseJSONPayloadProcType, recordPayloadGetSetter{}) -} - -func parseJSON( - processorType string, - getSetter recordDataGetSetter, -) (processor.Interface, error) { - return NewFuncWrapper(func(_ context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - - switch data.(type) { - case record.RawData: - var jsonData record.StructuredData - if len(data.Bytes()) == 0 { - // 
change empty raw data to empty structured data - r = getSetter.Set(r, jsonData) - return r, nil - } - err := json.Unmarshal(data.Bytes(), &jsonData) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: failed to unmarshal raw data as JSON: %w", processorType, err) - } - r = getSetter.Set(r, jsonData) - - case record.StructuredData: - // data is already structured - case nil: - // if the field is nil leave it as it is - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - - return r, nil - }), nil -} diff --git a/pkg/processor/procbuiltin/parsejson_test.go b/pkg/processor/procbuiltin/parsejson_test.go deleted file mode 100644 index 0a938d514..000000000 --- a/pkg/processor/procbuiltin/parsejson_test.go +++ /dev/null @@ -1,196 +0,0 @@ -// Copyright © 2023 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "context" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/google/go-cmp/cmp" - "github.com/matryer/is" -) - -func TestParseJSONKey_Process(t *testing.T) { - tests := []struct { - name string - record record.Record - want record.Record - wantErr bool - }{{ - name: "raw key", - record: record.Record{ - Key: record.RawData{ - Raw: []byte("{\"after\":{\"data\":4,\"id\":3}}"), - Schema: nil, - }, - }, - want: record.Record{ - Key: record.StructuredData{ - "after": map[string]interface{}{"data": float64(4), "id": float64(3)}, - }, - }, - wantErr: false, - }, { - name: "already structured key", - record: record.Record{ - Key: record.StructuredData{ - "after": map[string]interface{}{"data": float64(4), "id": float64(3)}, - }, - }, - want: record.Record{ - Key: record.StructuredData{ - "after": map[string]interface{}{"data": float64(4), "id": float64(3)}, - }, - }, - wantErr: false, - }, { - name: "invalid JSON key", - record: record.Record{ - Key: record.RawData{ - Raw: []byte("\"invalid\":\"json\""), - Schema: nil, - }, - }, - wantErr: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := ParseJSONKey(processor.Config{}) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.record) - if (err != nil) != tt.wantErr { - t.Fatalf("process() error = %v, wantErr = %v", err, tt.wantErr) - } - if diff := cmp.Diff(tt.want, got); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} - -func TestParseJSONPayload_Process(t *testing.T) { - tests := []struct { - name string - record record.Record - want record.Record - wantErr bool - }{{ - name: "raw payload", - record: record.Record{ - Payload: record.Change{ - Before: record.RawData{ - Raw: []byte("{\"ignored\":\"true\"}"), - Schema: nil, - }, - After: record.RawData{ - Raw: []byte("{\"after\":{\"data\":4,\"id\":3}}"), - 
Schema: nil, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: record.RawData{ - Raw: []byte("{\"ignored\":\"true\"}"), - Schema: nil, - }, - After: record.StructuredData{ - "after": map[string]interface{}{"data": float64(4), "id": float64(3)}, - }, - }, - }, - wantErr: false, - }, { - name: "already structured payload", - record: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "after": map[string]interface{}{"data": float64(4), "id": float64(3)}, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "after": map[string]interface{}{"data": float64(4), "id": float64(3)}, - }, - }, - }, - wantErr: false, - }, { - name: "nil after", - record: record.Record{ - Payload: record.Change{ - Before: nil, - After: nil, - }, - }, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: nil, - }, - }, - wantErr: false, - }, { - name: "invalid JSON payload", - record: record.Record{ - Payload: record.Change{ - After: record.RawData{ - Raw: []byte("\"invalid\":\"true\""), - Schema: nil, - }, - }}, - wantErr: true, - }, { - name: "empty raw data parsed into empty structured data", - record: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{}, - }, - }, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData(nil), - }, - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := ParseJSONPayload(processor.Config{}) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.record) - if (err != nil) != tt.wantErr { - t.Fatalf("process() error = %v, wantErr = %v", err, tt.wantErr) - } - if diff := cmp.Diff(tt.want, got); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} diff --git a/pkg/processor/procbuiltin/replacefield.go b/pkg/processor/procbuiltin/replacefield.go deleted 
file mode 100644 index 6ead1fc8b..000000000 --- a/pkg/processor/procbuiltin/replacefield.go +++ /dev/null @@ -1,148 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "strings" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -const ( - replaceFieldKeyProcType = "replacefieldkey" - replaceFieldPayloadProcType = "replacefieldpayload" - - replaceFieldConfigExclude = "exclude" - replaceFieldConfigInclude = "include" - replaceFieldConfigRename = "rename" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(replaceFieldKeyProcType, ReplaceFieldKey) - processor.GlobalBuilderRegistry.MustRegister(replaceFieldPayloadProcType, ReplaceFieldKey) -} - -// ReplaceFieldKey builds a processor which replaces a field in a structured key. -// Raw data is not supported. The processor can be controlled by 3 variables: -// - "exclude" - is a comma separated list of fields that should be excluded -// from the processed record ("exclude" takes precedence over "include"). -// - "include" - is a comma separated list of fields that should be included -// in the processed record. -// - "rename" - is a comma separated list of pairs separated by colons, that -// controls the mapping of old field names to new field names. 
-// -// If "include" is not configured or is empty then all fields in the record will -// be included by default (except if they are configured in "exclude"). -// If "include" is not empty, then all fields are excluded by default and only -// fields in "include" will be added to the processed record. -func ReplaceFieldKey(config processor.Config) (processor.Interface, error) { - return replaceField(replaceFieldKeyProcType, recordKeyGetSetter{}, config) -} - -// ReplaceFieldPayload builds the same processor as ReplaceFieldKey, except that -// it operates on the field Record.Payload.After. -func ReplaceFieldPayload(config processor.Config) (processor.Interface, error) { - return replaceField(replaceFieldPayloadProcType, recordPayloadGetSetter{}, config) -} - -func replaceField( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - var ( - exclude string - include string - rename string - - excludeMap = make(map[string]bool) - includeMap = make(map[string]bool) - renameMap = make(map[string]string) - ) - - exclude = config.Settings[replaceFieldConfigExclude] - include = config.Settings[replaceFieldConfigInclude] - rename = config.Settings[replaceFieldConfigRename] - - if exclude == "" && include == "" && rename == "" { - return nil, cerrors.Errorf( - "%s: config must include at least one of [%s %s %s]", - processorType, - replaceFieldConfigExclude, - replaceFieldConfigInclude, - replaceFieldConfigRename, - ) - } - - if rename != "" { - pairs := strings.Split(rename, ",") - for _, pair := range pairs { - tokens := strings.Split(pair, ":") - if len(tokens) != 2 { - return nil, cerrors.Errorf( - "%s: config field %q contains invalid value %q, expected format is \"foo:c1,bar:c2\"", - processorType, - replaceFieldConfigRename, - rename, - ) - } - renameMap[tokens[0]] = tokens[1] - } - } - if exclude != "" { - excludeList := strings.Split(exclude, ",") - for _, v := range excludeList { - excludeMap[v] = true 
- } - } - if include != "" { - includeList := strings.Split(include, ",") - for _, v := range includeList { - includeMap[v] = true - } - } - - return NewFuncWrapper(func(_ context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - - switch d := data.(type) { - case record.RawData: - if d.Schema == nil { - return record.Record{}, cerrors.Errorf("%s: schemaless raw data not supported", processorType) - } - return record.Record{}, cerrors.Errorf("%s: data with schema not supported yet", processorType) // TODO - case record.StructuredData: - // TODO add support for nested fields - for field, value := range d { - if excludeMap[field] || (len(includeMap) != 0 && !includeMap[field]) { - delete(d, field) - continue - } - if newField, ok := renameMap[field]; ok { - delete(d, field) - d[newField] = value - } - } - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - - r = getSetter.Set(r, data) - return r, nil - }), nil -} diff --git a/pkg/processor/procbuiltin/replacefield_test.go b/pkg/processor/procbuiltin/replacefield_test.go deleted file mode 100644 index 2d7269ead..000000000 --- a/pkg/processor/procbuiltin/replacefield_test.go +++ /dev/null @@ -1,639 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "context" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/conduitio/conduit/pkg/record/schema/mock" - "github.com/google/go-cmp/cmp" - "github.com/matryer/is" -) - -func TestReplaceFieldKey_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty exclude returns error", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: ""}, - }}, - wantErr: true, - }, { - name: "empty include returns error", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigInclude: ""}, - }}, - wantErr: true, - }, { - name: "empty rename returns error", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigRename: ""}, - }}, - wantErr: true, - }, { - name: "invalid rename returns error", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigRename: "foo,bar"}, - }}, - wantErr: true, - }, { - name: "non-empty exclude returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: "foo"}, - }}, - wantErr: false, - }, { - name: "non-empty include returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigInclude: "foo"}, - }}, - wantErr: false, - }, { - name: "valid rename returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigRename: "foo:c1,bar:c2"}, - }}, - wantErr: false, - }, { - name: "non-empty all fields returns processor", - args: args{config: 
processor.Config{ - Settings: map[string]string{ - replaceFieldConfigExclude: "foo", - replaceFieldConfigInclude: "bar", - replaceFieldConfigRename: "foo:c1,bar:c2"}, - }}, - wantErr: false, - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := ReplaceFieldKey(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("ReplaceFieldKey() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestReplaceFieldKey_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "structured data exclude", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: "foo,bar"}, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "baz": []byte("123"), - }, - }, - wantErr: false, - }, { - name: "structured data include", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigInclude: "foo,baz"}, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "baz": []byte("123"), - }, - }, - wantErr: false, - }, { - name: "structured data rename", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigRename: "foo:c1,bar:c2"}, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "c1": 123, - "c2": 1.2, - "baz": []byte("123"), - }, - }, - wantErr: false, - }, { - name: "structured data exclude and rename", - config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigExclude: "foo,baz", - replaceFieldConfigRename: 
"foo:c1,bar:c2", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "c2": 1.2, - }, - }, - wantErr: false, - }, { - name: "structured data include and rename", - config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigInclude: "foo,baz", - replaceFieldConfigRename: "foo:c1,bar:c2", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "c1": 123, - "baz": []byte("123"), - }, - }, - wantErr: false, - }, { - name: "structured data exclude and include", - config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigExclude: "foo,baz", - replaceFieldConfigInclude: "baz,bar", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - "other": "something", - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "bar": 1.2, - }, - }, - wantErr: false, - }, { - name: "structured data exclude, include and rename", - config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigExclude: "foo,baz", - replaceFieldConfigInclude: "baz,bar", - replaceFieldConfigRename: "foo:c1,bar:c2,other:asdf", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - "other": "something", - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "c2": 1.2, - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: "foo"}, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }}, - wantErr: true, // not supported - }, { - name: "raw data with schema", - config: processor.Config{ - 
Settings: map[string]string{replaceFieldConfigExclude: "foo"}, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := ReplaceFieldKey(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} - -func TestReplaceFieldPayload_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty exclude returns error", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: ""}, - }}, - wantErr: true, - }, { - name: "empty include returns error", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigInclude: ""}, - }}, - wantErr: true, - }, { - name: "empty rename returns error", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigRename: ""}, - }}, - wantErr: true, - }, { - name: "invalid rename returns error", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigRename: "foo,bar"}, - }}, - wantErr: true, - }, { - name: "non-empty exclude returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: "foo"}, - }}, - wantErr: false, - }, { - name: "non-empty 
include returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigInclude: "foo"}, - }}, - wantErr: false, - }, { - name: "valid rename returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{replaceFieldConfigRename: "foo:c1,bar:c2"}, - }}, - wantErr: false, - }, { - name: "non-empty all fields returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigExclude: "foo", - replaceFieldConfigInclude: "bar", - replaceFieldConfigRename: "foo:c1,bar:c2", - }, - }}, - wantErr: false, - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := ReplaceFieldPayload(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("ReplaceFieldPayload() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestReplaceFieldPayload_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "structured data exclude", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: "foo,bar"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "baz": []byte("123"), - }, - }, - }, - wantErr: false, - }, { - name: "structured data include", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigInclude: "foo,baz"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "baz": []byte("123"), 
- }, - }, - }, - wantErr: false, - }, { - name: "structured data rename", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigRename: "foo:c1,bar:c2"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "c1": 123, - "c2": 1.2, - "baz": []byte("123"), - }, - }, - }, - wantErr: false, - }, { - name: "structured data exclude and rename", - config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigExclude: "foo,baz", - replaceFieldConfigRename: "foo:c1,bar:c2", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "c2": 1.2, - }, - }, - }, - wantErr: false, - }, { - name: "structured data include and rename", - config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigInclude: "foo,baz", - replaceFieldConfigRename: "foo:c1,bar:c2", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "c1": 123, - "baz": []byte("123"), - }, - }, - }, - wantErr: false, - }, { - name: "structured data exclude and include", - config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigExclude: "foo,baz", - replaceFieldConfigInclude: "baz,bar", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - "other": "something", - }, - }, - }}, - 
want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "bar": 1.2, - }, - }, - }, - wantErr: false, - }, { - name: "structured data exclude, include and rename", - config: processor.Config{ - Settings: map[string]string{ - replaceFieldConfigExclude: "foo,baz", - replaceFieldConfigInclude: "baz,bar", - replaceFieldConfigRename: "foo:c1,bar:c2,other:asdf", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": 1.2, - "baz": []byte("123"), - "other": "something", - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "c2": 1.2, - }, - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }, - }}, - wantErr: true, // not supported - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{replaceFieldConfigExclude: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := ReplaceFieldPayload(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} diff --git a/pkg/processor/procbuiltin/timestampconverter.go 
b/pkg/processor/procbuiltin/timestampconverter.go deleted file mode 100644 index 3d94c710b..000000000 --- a/pkg/processor/procbuiltin/timestampconverter.go +++ /dev/null @@ -1,138 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "time" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -const ( - timestampConverterKeyProcType = "timestampconverterkey" - timestampConverterPayloadProcType = "timestampconverterpayload" - - timestampConverterConfigTargetType = "target.type" - timestampConverterConfigField = "date" - timestampConverterConfigFormat = "format" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(timestampConverterKeyProcType, TimestampConverterKey) - processor.GlobalBuilderRegistry.MustRegister(timestampConverterPayloadProcType, TimestampConverterPayload) -} - -// TimestampConverterKey builds a processor which converts a timestamp in a field in the key -// into a different type. The supported types are: -// - "string" -// - "unix" -// - "time.Time". -// -// Any combination of the supported types is possible. For example, it's possible -// to convert from a Unix timestamp to Go's time.Time or to convert from a string -// to a Unix timestamp. -// -// The processor supports only structured data. 
-func TimestampConverterKey(config processor.Config) (processor.Interface, error) { - return timestampConverter(timestampConverterKeyProcType, recordKeyGetSetter{}, config) -} - -// TimestampConverterPayload builds the same processor as TimestampConverterKey, except that -// it operates on the field Record.Payload.After. -func TimestampConverterPayload(config processor.Config) (processor.Interface, error) { - return timestampConverter(timestampConverterPayloadProcType, recordPayloadGetSetter{}, config) -} - -func timestampConverter( - processorType string, - getSetter recordDataGetSetter, - config processor.Config, -) (processor.Interface, error) { - const ( - stringType = "string" - unixType = "unix" - timeType = "time.Time" - ) - - var ( - err error - targetType string - field string - format string - ) - - // if field is empty then input is raw data - if field, err = getConfigFieldString(config, timestampConverterConfigField); err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - if targetType, err = getConfigFieldString(config, timestampConverterConfigTargetType); err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - if targetType != stringType && targetType != unixType && targetType != timeType { - return nil, cerrors.Errorf("%s: targetType (%s) is not supported", processorType, targetType) - } - format = config.Settings[timestampConverterConfigFormat] // can be empty - if format == "" && targetType == stringType { - return nil, cerrors.Errorf("%s: format is needed to parse the output", processorType) - } - - return NewFuncWrapper(func(_ context.Context, r record.Record) (record.Record, error) { - data := getSetter.Get(r) - switch d := data.(type) { - case record.RawData: - if d.Schema == nil { - return record.Record{}, cerrors.Errorf("%s: schemaless raw data not supported", processorType) - } - return record.Record{}, cerrors.Errorf("%s: data with schema not supported yet", processorType) // TODO - case 
record.StructuredData: - var tm time.Time - switch v := d[field].(type) { - case int64: - tm = time.Unix(0, v).UTC() - case string: - if format == "" { - return record.Record{}, cerrors.Errorf("%s: no format to parse the date", processorType) - } - tm, err = time.Parse(format, v) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: %w", processorType, err) - } - case time.Time: - tm = v - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, d[field]) - } - // TODO add support for nested fields - switch targetType { - case stringType: // use "format" to generate the output - d[field] = tm.Format(format) - case unixType: - d[field] = tm.UnixNano() - case timeType: - d[field] = tm - default: - return record.Record{}, cerrors.Errorf("%s: unexpected output type %T", processorType, targetType) - } - default: - return record.Record{}, cerrors.Errorf("%s: unexpected data type %T", processorType, data) - } - - r = getSetter.Set(r, data) - return r, nil - }), nil -} diff --git a/pkg/processor/procbuiltin/timestampconverter_test.go b/pkg/processor/procbuiltin/timestampconverter_test.go deleted file mode 100644 index 0bf3deeb4..000000000 --- a/pkg/processor/procbuiltin/timestampconverter_test.go +++ /dev/null @@ -1,619 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "context" - "testing" - "time" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/conduitio/conduit/pkg/record/schema/mock" - "github.com/google/go-cmp/cmp" - "github.com/matryer/is" -) - -func TestTimestampConverterKey_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: processor.Config{ - Settings: map[string]string{timestampConverterConfigField: ""}, - }}, - wantErr: true, - }, { - name: "empty format returns error when targetType is string", - args: args{config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "foo", - timestampConverterConfigTargetType: "string"}, - }}, - wantErr: true, - }, { - name: "unix target type doesn't require a format", - args: args{config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "foo", - timestampConverterConfigTargetType: "unix", - }, - }}, - wantErr: false, - }, { - name: "time.Time target type doesn't require a format, unless input type is string", - args: args{config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "foo", - timestampConverterConfigTargetType: "time.Time", - timestampConverterConfigFormat: "2016-01-02", - }, - }}, - wantErr: false, - }, { - name: "string targetType needs a format", - args: args{config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "foo", - timestampConverterConfigTargetType: "string", - timestampConverterConfigFormat: "2016-01-02", - }, - }}, - wantErr: false, - }} - - for _, tt := 
range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := TimestampConverterKey(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("TimestampConverterKey() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestTimestampConverterKey_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "from unix to string", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "string", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "date": int64(1621382400000000000), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "date": "2021-05-19", - }, - }, - wantErr: false, - }, { - name: "from time.Time to string", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "string", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "date": time.Date(2021, time.May, 19, 0, 0, 0, 0, time.UTC), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "date": "2021-05-19", - }, - }, - wantErr: false, - }, { - name: "from time.Time to unix", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "unix", - timestampConverterConfigFormat: "", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "date": time.Date(2021, time.May, 19, 0, 0, 0, 0, time.UTC), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "date": int64(1621382400000000000), - }, - }, - wantErr: false, - }, { - name: "from string to unix", - config: processor.Config{ - Settings: map[string]string{ - 
timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "unix", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "date": "2021-05-19", - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "date": int64(1621382400000000000), - }, - }, - wantErr: false, - }, { - name: "from string to time.Time", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "time.Time", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "date": "2021-05-19", - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "date": time.Date(2021, time.May, 19, 0, 0, 0, 0, time.UTC), - }, - }, - wantErr: false, - }, { - name: "from string to time.Time with empty format should throw error", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "time.Time", - timestampConverterConfigFormat: "", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "date": "2021-05-19", - }, - }}, - want: record.Record{}, - wantErr: true, - }, { - name: "from string to unix with empty format should throw error", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "unix", - timestampConverterConfigFormat: "", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "date": "2021-05-19", - }, - }}, - want: record.Record{}, - wantErr: true, - }, { - name: "from unix to time.Time", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "time.Time", - timestampConverterConfigFormat: "", - }, - }, - args: args{r: record.Record{ - Key: record.StructuredData{ - "date": 
int64(1621382400000000000), - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "date": time.Date(2021, time.May, 19, 0, 0, 0, 0, time.UTC), - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "string", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }}, - wantErr: true, // not supported - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "foo", - timestampConverterConfigTargetType: "unix", - }, - }, - args: args{r: record.Record{ - Key: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := TimestampConverterKey(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} - -func TestTimestampConverterPayload_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: processor.Config{ - Settings: map[string]string{timestampConverterConfigField: ""}, - }}, - wantErr: true, - }, { - 
name: "empty format returns error when targetType is string", - args: args{config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "foo", - timestampConverterConfigTargetType: "string", - }, - }}, - wantErr: true, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := TimestampConverterPayload(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("TimestampConverterPayload() error = %v, wantErr %v", err, tt.wantErr) - return - } - }) - } -} - -func TestTimestampConverterPayload_Process(t *testing.T) { - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "from unix to string", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "string", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": int64(1621382400000000000), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": "2021-05-19", - }, - }, - }, - wantErr: false, - }, { - name: "from time.Time to string", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "string", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": time.Date(2021, time.May, 19, 0, 0, 0, 0, time.UTC), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": "2021-05-19", - }, - }, - }, - wantErr: false, - }, { - name: "from time.Time to unix", - config: processor.Config{ - Settings: map[string]string{ - 
timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "unix", - timestampConverterConfigFormat: "", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": time.Date(2021, time.May, 19, 0, 0, 0, 0, time.UTC), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": int64(1621382400000000000), - }, - }, - }, - wantErr: false, - }, { - name: "from string to unix", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "unix", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": "2021-05-19", - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": int64(1621382400000000000), - }, - }, - }, - wantErr: false, - }, { - name: "from string to time.Time", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "time.Time", - timestampConverterConfigFormat: "2006-01-02", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": "2021-05-19", - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": time.Date(2021, time.May, 19, 0, 0, 0, 0, time.UTC), - }, - }, - }, - wantErr: false, - }, { - name: "from string to time.Time with empty format should throw error", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "time.Time", - timestampConverterConfigFormat: "", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: 
record.StructuredData{ - "date": "2021-05-19", - }, - }, - }}, - want: record.Record{}, - wantErr: true, - }, { - name: "from string to unix with empty format should throw error", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "unix", - timestampConverterConfigFormat: "", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": "2021-05-19", - }, - }, - }}, - want: record.Record{}, - wantErr: true, - }, { - name: "from unix to time.Time", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "time.Time", - timestampConverterConfigFormat: "", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": int64(1621382400000000000), - }, - }, - }}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "date": time.Date(2021, time.May, 19, 0, 0, 0, 0, time.UTC), - }, - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "date", - timestampConverterConfigTargetType: "string", - timestampConverterConfigFormat: "2006-01-02"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }, - }}, - wantErr: true, // not supported - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{ - timestampConverterConfigField: "foo", - timestampConverterConfigTargetType: "unix", - }, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO 
not implemented - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := TimestampConverterPayload(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(got, tt.want); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} diff --git a/pkg/processor/procbuiltin/unwrap.go b/pkg/processor/procbuiltin/unwrap.go deleted file mode 100644 index 69c76646a..000000000 --- a/pkg/processor/procbuiltin/unwrap.go +++ /dev/null @@ -1,571 +0,0 @@ -// Copyright © 2023 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "context" - "encoding/base64" - "fmt" - "time" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/foundation/multierror" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/goccy/go-json" -) - -type unwrapProcessor struct { - unwrapper unwrapper -} - -// unwrapper unwraps the formatted record from the openCDC record -type unwrapper interface { - // Unwrap gets the unwrapped record - Unwrap(record.Record) (record.Record, error) -} - -const ( - unwrapProcType = "unwrap" - - unwrapConfigFormat = "format" - - FormatDebezium = "debezium" - FormatKafkaConnect = "kafka-connect" - FormatOpenCDC = "opencdc" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(unwrapProcType, Unwrap) -} - -func Unwrap(config processor.Config) (processor.Interface, error) { - if _, ok := config.Settings[unwrapConfigFormat]; !ok { - return nil, cerrors.Errorf("%s: %q config not specified", unwrapProcType, unwrapConfigFormat) - } - format := config.Settings[unwrapConfigFormat] - proc := &unwrapProcessor{} - switch format { - case FormatDebezium: - proc.unwrapper = &debeziumUnwrapper{} - case FormatKafkaConnect: - proc.unwrapper = &kafkaConnectUnwrapper{} - case FormatOpenCDC: - proc.unwrapper = &openCDCUnwrapper{} - default: - return nil, cerrors.Errorf("%s: %q is not a valid format", unwrapProcType, format) - } - - return NewFuncWrapper(proc.Process), nil -} - -func (p *unwrapProcessor) Process(_ context.Context, in record.Record) (record.Record, error) { - data := in.Payload.After - var structData record.StructuredData - switch d := data.(type) { - case record.RawData: - // todo: take this section out, after platform team support ordering processors - // unmarshal raw data to structured - err := json.Unmarshal(data.Bytes(), &structData) - if err != nil { - return record.Record{}, cerrors.Errorf("failed to unmarshal raw data as JSON: %w", 
unwrapProcType, err) - } - case record.StructuredData: - structData = d - default: - return record.Record{}, cerrors.Errorf("unexpected data type %T", unwrapProcType, data) - } - // assign the structured data to payload.After - in.Payload.After = structData - - out, err := p.unwrapper.Unwrap(in) - if err != nil { - return record.Record{}, cerrors.Errorf("%s: error unwrapping record: %w", unwrapProcType, err) - } - return out, nil -} - -/* -Example of an OpenCDC record: -{ - "key": "NWQ0N2UwZGQtNTkxYi00MGEyLTk3YzMtYzc1MDY0MWU3NTc1", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028881541916000", - "opencdc.version": "v1" - }, - "operation": "create", - "payload": { - "after": { - "event_id": 2041181862, - "msg": "string 4c88f20f-aa77-4f4b-9354-e4fdb1989a52", - "pg_generator": false, - "sensor_id": 54434691, - "triggered": false - }, - "before": null - }, - "position": "ZWIwNmJiMmMtNWNhMS00YjUyLWE2ZmMtYzc0OTFlZDQ3OTYz" -} -*/ - -// openCDCUnwrapper unwraps an OpenCDC record from the payload, by unmarhsalling rec.Payload.After into type Record. -type openCDCUnwrapper struct{} - -// UnwrapOperation extracts operation from a structuredData record. -func (o *openCDCUnwrapper) UnwrapOperation(structData record.StructuredData) (record.Operation, error) { - var operation record.Operation - op, ok := structData["operation"] - if !ok { - return operation, cerrors.Errorf("record payload after doesn't contain operation") - } - - switch opType := op.(type) { - case record.Operation: - operation = opType - case string: - if err := operation.UnmarshalText([]byte(opType)); err != nil { - return operation, cerrors.Errorf("couldn't unmarshal record operation") - } - default: - return operation, cerrors.Errorf("expected a record.Operation or a string, got %T", opType) - } - return operation, nil -} - -// UnwrapMetadata extracts metadata from a structuredData record. 
-func (o *openCDCUnwrapper) UnwrapMetadata(structData record.StructuredData) (record.Metadata, error) { - var metadata record.Metadata - meta, ok := structData["metadata"] - if !ok { - return metadata, cerrors.Errorf("record payload after doesn't contain metadata") - } - - switch m := meta.(type) { - case record.Metadata: - metadata = m - case map[string]interface{}: - metadata = make(record.Metadata, len(m)) - for k, v := range m { - metadata[k] = fmt.Sprint(v) - } - default: - return metadata, cerrors.Errorf("expected a record.Metadata or a map[string]interface{}, got %T", m) - } - return metadata, nil -} - -// UnwrapKey extracts key from a structuredData record. -func (o *openCDCUnwrapper) UnwrapKey(structData record.StructuredData) (record.Data, error) { - var key record.Data - ky, ok := structData["key"] - if !ok { - return key, cerrors.Errorf("record payload after doesn't contain key") - } - switch k := ky.(type) { - case map[string]interface{}: - convertedData := make(record.StructuredData, len(k)) - for kk, v := range k { - convertedData[kk] = v - } - key = convertedData - case string: - decoded := make([]byte, base64.StdEncoding.DecodedLen(len(k))) - n, err := base64.StdEncoding.Decode(decoded, []byte(k)) - if err != nil { - return key, cerrors.Errorf("couldn't decode key: %w", err) - } - key = record.RawData{Raw: decoded[:n]} - default: - return key, cerrors.Errorf("expected a record.Data or a string, got %T", k) - } - return key, nil -} - -func (o *openCDCUnwrapper) convertPayloadData(payload map[string]interface{}, key string) (record.Data, error) { - payloadData, ok := payload[key] - if !ok { - return nil, nil - } - - switch data := payloadData.(type) { - case map[string]interface{}: - convertedData := make(record.StructuredData, len(data)) - for k, v := range data { - convertedData[k] = v - } - return convertedData, nil - case string: - decoded := make([]byte, base64.StdEncoding.DecodedLen(len(data))) - n, err := base64.StdEncoding.Decode(decoded, 
[]byte(data)) - if err != nil { - return nil, cerrors.Errorf("couldn't decode payload %s: %w", err, key) - } - return record.RawData{Raw: decoded[:n]}, nil - default: - return nil, nil - } -} - -// UnwrapPayload extracts payload from a structuredData record. -func (o *openCDCUnwrapper) UnwrapPayload(structData record.StructuredData) (record.Change, error) { - var payload record.Change - pl, ok := structData["payload"] - if !ok { - return payload, cerrors.Errorf("record payload doesn't contain payload") - } - - switch p := pl.(type) { - case record.Change: - payload = p - case map[string]interface{}: - before, err := o.convertPayloadData(p, "before") - if err != nil { - return record.Change{}, err - } - - after, err := o.convertPayloadData(p, "after") - if err != nil { - return record.Change{}, err - } - - payload = record.Change{ - Before: before, - After: after, - } - default: - return record.Change{}, cerrors.Errorf("expected a record.Change or a map[string]interface{}, got %T", p) - } - return payload, nil -} - -// Unwrap replaces the whole record.payload with record.payload.after.payload except position. 
-func (o *openCDCUnwrapper) Unwrap(rec record.Record) (record.Record, error) { - var structData record.StructuredData - data := rec.Payload.After - switch d := data.(type) { - case record.RawData: - // unmarshal raw data to structured - if err := json.Unmarshal(data.Bytes(), &structData); err != nil { - return record.Record{}, cerrors.Errorf("failed to unmarshal raw data as JSON: %w", unwrapProcType, err) - } - case record.StructuredData: - structData = d - default: - return record.Record{}, cerrors.Errorf("unexpected data type %T", unwrapProcType, data) - } - - operation, err := o.UnwrapOperation(structData) - if err != nil { - return record.Record{}, err - } - - metadata, err := o.UnwrapMetadata(structData) - if err != nil { - return record.Record{}, err - } - - key, err := o.UnwrapKey(structData) - if err != nil { - return record.Record{}, err - } - - payload, err := o.UnwrapPayload(structData) - if err != nil { - return record.Record{}, err - } - - // Position is the only key we preserve from the original record to maintain the reference respect other messages - // that will be coming from in the event of chaining pipelines (e.g.: source -> kafka, kafka -> destination) - return record.Record{ - Key: key, - Position: rec.Position, - Metadata: metadata, - Payload: payload, - Operation: operation, - }, nil -} - -/* -Example of a kafka-connect record: - { - "payload": { - "description": "desc", - "id": 20 - }, - "schema": {} // will be ignored - } -*/ - -// kafkaConnectUnwrapper unwraps a kafka connect record from the payload, expects rec.Payload.After to be of type record.StructuredData -type kafkaConnectUnwrapper struct{} - -func (k *kafkaConnectUnwrapper) Unwrap(rec record.Record) (record.Record, error) { - // record must be structured - structPayload, ok := rec.Payload.After.(record.StructuredData) - if !ok { - return record.Record{}, cerrors.Errorf("record payload data must be structured data") - } - - // get payload - structPayload, ok = 
structPayload["payload"].(map[string]any) - if !ok { - return record.Record{}, cerrors.Errorf("payload doesn't contain a record") - } - - return record.Record{ - Key: k.UnwrapKey(rec.Key), - Position: rec.Position, - Metadata: nil, - Payload: record.Change{ - Before: nil, - After: structPayload, - }, - Operation: record.OperationSnapshot, - }, nil -} - -// UnwrapKey unwraps key as a kafka connect formatted record, returns the key's payload content, or returns the -// original key if payload doesn't exist. -func (k *kafkaConnectUnwrapper) UnwrapKey(key record.Data) record.Data { - // convert the key to structured data - var structKey record.StructuredData - switch d := key.(type) { - case record.RawData: - // try unmarshalling raw key - err := json.Unmarshal(key.Bytes(), &structKey) - // if key is not json formatted, return the original key - if err != nil { - return key - } - case record.StructuredData: - structKey = d - } - - payload, ok := structKey["payload"] - // return the original key if it doesn't contain a payload - if !ok { - return key - } - - // if payload is a map, return the payload as structured data - if p, ok := payload.(map[string]any); ok { - return record.StructuredData(p) - } - - // otherwise, convert the payload to string, then return it as raw data - raw := fmt.Sprint(payload) - - return record.RawData{Raw: []byte(raw)} -} - -/* -Example of a debezium record: - { - "payload": { - "after": { - "description": "desc", - "id": 20 - }, - "before": null, - "op": "c", - "source": { - "opencdc.readAt": "1674061777225877000", - "opencdc.version": "v1", - }, - "transaction": null, - "ts_ms": 1674061777225 - }, - "schema": {} // will be ignored - } -*/ -// debeziumUnwrapper unwraps a debezium record from the payload. 
-type debeziumUnwrapper struct { - kafkaConnectUnwrapper kafkaConnectUnwrapper -} - -const ( - debeziumOpCreate = "c" - debeziumOpUpdate = "u" - debeziumOpDelete = "d" - debeziumOpRead = "r" // snapshot - debeziumOpUnset = "$unset" // mongoDB unset operation - - debeziumFieldBefore = "before" - debeziumFieldAfter = "after" - debeziumFieldSource = "source" - debeziumFieldOp = "op" - debeziumFieldTimestamp = "ts_ms" -) - -func (d *debeziumUnwrapper) Unwrap(rec record.Record) (record.Record, error) { - // record must be structured - debeziumRec, ok := rec.Payload.After.(record.StructuredData) - if !ok { - return record.Record{}, cerrors.Errorf("record payload data must be structured data") - } - // get payload - debeziumRec, ok = debeziumRec["payload"].(map[string]any) // the payload has the debezium record - if !ok { - return record.Record{}, cerrors.Errorf("payload doesn't contain a record") - } - - // check fields under payload - err := d.validateRecord(debeziumRec) - if err != nil { - return record.Record{}, err - } - - before, err := d.valueToData(debeziumRec[debeziumFieldBefore]) - if err != nil { - return record.Record{}, cerrors.Errorf("failed to parse field %s: %w", debeziumFieldBefore, err) - } - - after, err := d.valueToData(debeziumRec[debeziumFieldAfter]) - if err != nil { - return record.Record{}, cerrors.Errorf("failed to parse field %s: %w", debeziumFieldAfter, err) - } - - op, ok := debeziumRec[debeziumFieldOp].(string) - if !ok { - return record.Record{}, cerrors.Errorf("%s operation is not a string", op) - } - - operation, err := d.convertOperation(op) - if err != nil { - return record.Record{}, cerrors.Errorf("error unwrapping operation: %w", err) - } - - metadata, err := d.unwrapMetadata(rec) - if err != nil { - return record.Record{}, cerrors.Errorf("error unwrapping metadata: %w", err) - } - - return record.Record{ - Key: d.kafkaConnectUnwrapper.UnwrapKey(rec.Key), - Position: rec.Position, - Operation: operation, - Payload: record.Change{ - 
Before: before, - After: after, - }, - Metadata: metadata, - }, nil -} - -func (d *debeziumUnwrapper) valueToData(val any) (record.Data, error) { - switch v := val.(type) { - case map[string]any: - return record.StructuredData(v), nil - case string: - return record.RawData{Raw: []byte(v)}, nil - case nil: - // nil is allowed - return nil, nil - default: - return nil, cerrors.Errorf("expected a map or a string, got %T", val) - } -} - -func (d *debeziumUnwrapper) validateRecord(data record.StructuredData) error { - var multiErr error - if _, ok := data[debeziumFieldAfter]; !ok { - multiErr = multierror.Append(multiErr, cerrors.Errorf("the %q field is missing from debezium payload", debeziumFieldAfter)) - } - if _, ok := data[debeziumFieldSource]; !ok { - multiErr = multierror.Append(multiErr, cerrors.Errorf("the %q field is missing from debezium payload", debeziumFieldSource)) - } - if _, ok := data[debeziumFieldOp]; !ok { - multiErr = multierror.Append(multiErr, cerrors.Errorf("the %q field is missing from debezium payload", debeziumFieldOp)) - } - // ts_ms and transaction can be empty - return multiErr -} - -func (d *debeziumUnwrapper) unwrapMetadata(rec record.Record) (record.Metadata, error) { - debeziumRec := rec.Payload.After.(record.StructuredData)["payload"].(map[string]any) - - var source map[string]string - for field, val := range debeziumRec { - switch field { - case debeziumFieldAfter, debeziumFieldBefore, debeziumFieldOp: - continue // ignore - case debeziumFieldTimestamp: - tsMs, ok := val.(float64) - if !ok { - return nil, cerrors.Errorf("%s is not a float", debeziumFieldTimestamp) - } - readAt := time.UnixMilli(int64(tsMs)) - rec.Metadata.SetReadAt(readAt) - case debeziumFieldSource: - // don't add prefix for source fields to be consistent with the - // behavior of the debezium converter in the SDK - it puts all - // metadata fields into the `source` field - source = d.flatten("", val) - default: - flattened := d.flatten("debezium."+field, val) - for 
k, v := range flattened { - rec.Metadata[k] = v - } - } - } - - // source is added at the end to overwrite any other fields - for k, v := range source { - rec.Metadata[k] = v - } - - return rec.Metadata, nil -} - -func (d *debeziumUnwrapper) flatten(key string, val any) map[string]string { - var prefix string - if len(key) > 0 { - prefix = key + "." - } - switch val := val.(type) { - case map[string]any: - out := make(map[string]string) - for k1, v1 := range val { - for k2, v2 := range d.flatten(prefix+k1, v1) { - out[k2] = v2 - } - } - return out - case nil: - return nil - case string: - return map[string]string{key: val} - default: - return map[string]string{key: fmt.Sprint(val)} - } -} - -// convertOperation converts debezium operation to openCDC operation -func (d *debeziumUnwrapper) convertOperation(op string) (record.Operation, error) { - switch op { - case debeziumOpCreate: - return record.OperationCreate, nil - case debeziumOpUpdate: - return record.OperationUpdate, nil - case debeziumOpDelete: - return record.OperationDelete, nil - case debeziumOpRead: - return record.OperationSnapshot, nil - case debeziumOpUnset: - return record.OperationUpdate, nil - } - return 0, cerrors.Errorf("%q is an invalid operation", op) -} diff --git a/pkg/processor/procbuiltin/unwrap_test.go b/pkg/processor/procbuiltin/unwrap_test.go deleted file mode 100644 index 40590afe4..000000000 --- a/pkg/processor/procbuiltin/unwrap_test.go +++ /dev/null @@ -1,1000 +0,0 @@ -// Copyright © 2023 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/google/go-cmp/cmp" - "github.com/matryer/is" -) - -const DebeziumRecordPayload = `{ - "payload": { - "after": { - "description": "test1", - "id": 27 - }, - "before": null, - "op": "c", - "source": { - "opencdc.readAt": "1674061777225877000", - "opencdc.version": "v1" - }, - "transaction": null, - "ts_ms": 1674061777225 - }, - "schema": {} - }` - -const OpenCDCRecordUpdateWithBeforePayload = `{ - "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "update", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1" - }, - "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - "payload": { - "before": { - "event_id": 1747353650, - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false - }, - "after": { - "event_id": 1747353658, - "msg": "string 0f5397c9-31f1-422a-9c9a-26e3574a5c31", - "pg_generator": false, - "sensor_id": 1250383580, - "triggered": false - } - } - }` - -const OpenCDCRecordUpdateWithoutBeforePayload = `{ - "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "update", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1" - }, - "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - "payload": { - "before": null, - "after": { - "event_id": 1747353650, - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false - } - } - }` - -const OpenCDCRecordDeleteWithoutBeforePayload = `{ - 
"position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "delete", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1" - }, - "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - "payload": { - "before": null, - "after": null - } - }` - -const OpenCDCRecordDeleteWithBeforePayload = `{ - "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "delete", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1" - }, - "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - "payload": { - "before": { - "event_id": 1747353650, - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false - }, - "after": null - } - }` -const OpenCDCRecordCreatePayload = `{ - "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "create", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1" - }, - "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - "payload": { - "before": null, - "after": { - "event_id": 1747353650, - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false - } - } - }` - -func TestUnwrap_Config(t *testing.T) { - tests := []struct { - name string - config processor.Config - wantErr bool - }{ - { - name: "empty config", - config: processor.Config{}, - wantErr: true, - }, - { - name: "invalid config", - config: processor.Config{ - Settings: map[string]string{"format": "bar"}, - }, - wantErr: true, - }, - { - name: "valid debezium config", - config: processor.Config{ - Settings: map[string]string{"format": "debezium"}, - }, - wantErr: 
false, - }, - { - name: "valid kafka-connect config", - config: processor.Config{ - Settings: map[string]string{"format": "kafka-connect"}, - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := Unwrap(tt.config) - if (err != nil) != tt.wantErr { - t.Fatalf("Unwrap() error = %v, wantErr = %v", err, tt.wantErr) - } - }) - } -} - -func TestUnwrap_Process(t *testing.T) { - tests := []struct { - name string - record record.Record - want record.Record - config processor.Config - wantErr bool - }{ - { - name: "raw payload", - config: processor.Config{ - Settings: map[string]string{"format": "debezium"}, - }, - record: record.Record{ - Metadata: map[string]string{}, - Key: record.RawData{ - Raw: []byte(`{"payload":"id"}`), - }, - Position: []byte("position"), - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(DebeziumRecordPayload), - }, - }, - }, - want: record.Record{ - Operation: record.OperationCreate, - Metadata: map[string]string{ - "opencdc.readAt": "1674061777225877000", - "opencdc.version": "v1", - }, - Key: record.RawData{ - Raw: []byte("id"), - }, - Position: []byte("position"), - Payload: record.Change{ - Before: nil, - After: record.StructuredData{"description": "test1", "id": float64(27)}, - }, - }, - wantErr: false, - }, - { - name: "structured payload", - config: processor.Config{ - Settings: map[string]string{"format": "debezium"}, - }, - record: record.Record{ - Metadata: map[string]string{ - "conduit.version": "v0.4.0", - }, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "payload": map[string]any{ - "after": map[string]any{ - "description": "test1", - "id": 27, - }, - "before": nil, - "op": "u", - "source": map[string]any{ - "opencdc.version": "v1", - }, - "transaction": nil, - "ts_ms": float64(1674061777225), - }, - "schema": map[string]any{}, - }, - }, - Key: record.StructuredData{ - "payload": 27, - "schema": map[string]any{}, - }, - }, 
- want: record.Record{ - Operation: record.OperationUpdate, - Metadata: map[string]string{ - "opencdc.readAt": "1674061777225000000", - "opencdc.version": "v1", - "conduit.version": "v0.4.0", - }, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{"description": "test1", "id": 27}, - }, - Key: record.RawData{ - Raw: []byte("27"), - }, - }, - wantErr: false, - }, - { - name: "structured payload kafka-connect", - config: processor.Config{ - Settings: map[string]string{"format": "kafka-connect"}, - }, - record: record.Record{ - Metadata: map[string]string{}, - Payload: record.Change{ - Before: record.StructuredData(nil), - After: record.StructuredData{ - "payload": map[string]any{ - "description": "test2", - "id": 27, - }, - "schema": map[string]any{}, - }, - }, - Key: record.StructuredData{ - "payload": map[string]any{ - "id": 27, - }, - "schema": map[string]any{}, - }, - }, - want: record.Record{ - Operation: record.OperationSnapshot, - Payload: record.Change{ - After: record.StructuredData{"description": "test2", "id": 27}, - }, - Key: record.StructuredData{"id": 27}, - }, - wantErr: false, - }, - { - name: "payload is invalid JSON", - config: processor.Config{ - Settings: map[string]string{"format": "kafka-connect"}, - }, - record: record.Record{ - Metadata: map[string]string{}, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("\"invalid\":\"true\""), - Schema: nil, - }, - }, - }, - wantErr: true, - }, - { - name: "mongoDB debezium record", - config: processor.Config{ - Settings: map[string]string{"format": "debezium"}, - }, - record: record.Record{ - Metadata: map[string]string{}, - Key: record.RawData{Raw: []byte(`{ "payload": { "id": "{ \"$oid\" : \"63210f1a3bc50864fde46a84\"}" }, "schema": { "fields": [ { "field": "id", "optional": false, "type": "string" } ], "name": "resource_7_735174.demo.user.Key", "optional": false, "type": "struct" } }`)}, - Payload: record.Change{ - Before: nil, - After: 
record.StructuredData{ - "payload": map[string]any{ - "after": `{"_id": {"$oid": "63210f1a3bc50864fde46a84"},"name": "First Last","age": 205}`, - "before": nil, - "op": "$unset", - "source": map[string]any{ - "opencdc.version": "v1", - }, - "transaction": nil, - "ts_ms": float64(1674061777225), - }, - "schema": map[string]any{}, - }, - }, - }, - want: record.Record{ - Operation: record.OperationUpdate, - Metadata: map[string]string{ - "opencdc.readAt": "1674061777225000000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - After: record.RawData{Raw: []byte(`{"_id": {"$oid": "63210f1a3bc50864fde46a84"},"name": "First Last","age": 205}`)}, - Before: nil, - }, - Key: record.StructuredData{"id": `{ "$oid" : "63210f1a3bc50864fde46a84"}`}, - }, - wantErr: false, - }, - { - name: "mongoDB debezium record delete", - config: processor.Config{ - Settings: map[string]string{"format": "debezium"}, - }, - record: record.Record{ - Metadata: map[string]string{}, - Key: record.RawData{Raw: []byte(`{ "payload": { "id": "{ \"$oid\" : \"63e69d7f07908def1d0a2504\"}" }, "schema": { "fields": [ { "field": "id", "optional": false, "type": "string" } ], "name": "resource_7_390584.demo.user.Key", "optional": false, "type": "struct" } }`)}, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "payload": map[string]any{ - "after": nil, - "before": nil, - "filter": `{"_id": {"$oid": "63e69d7f07908def1d0a2504"}}`, - "op": "d", - "patch": nil, - "source": map[string]any{ - "opencdc.version": "v1", - }, - "transaction": nil, - "ts_ms": float64(1674061777225), - }, - "schema": map[string]any{}, - }, - }, - }, - want: record.Record{ - Operation: record.OperationDelete, - Metadata: map[string]string{ - "opencdc.readAt": "1674061777225000000", - "opencdc.version": "v1", - "debezium.filter": `{"_id": {"$oid": "63e69d7f07908def1d0a2504"}}`, - }, - Payload: record.Change{ - After: nil, - Before: nil, - }, - Key: record.StructuredData{"id": `{ "$oid" : 
"63e69d7f07908def1d0a2504"}`}, - }, - wantErr: false, - }, - { - name: "mongoDB debezium record update", - config: processor.Config{ - Settings: map[string]string{"format": "debezium"}, - }, - record: record.Record{ - Metadata: map[string]string{}, - Key: record.RawData{Raw: []byte(`{ "payload": { "id": "{ \"$oid\" : \"63e69d7f07908def1d0a2504\"}" }, "schema": { "fields": [ { "field": "id", "optional": false, "type": "string" } ], "name": "resource_7_390584.demo.user.Key", "optional": false, "type": "struct" } }`)}, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "payload": map[string]any{ - "after": nil, - "before": nil, - "op": "u", - "filter": `{"_id": {"$oid": "63e69d7f07908def1d0a2504"}}`, - "patch": `{"$v": 2,"diff": {"u": {"age": {"$numberLong": "80"},"name": "Some Person80"}}}`, - "source": map[string]any{ - "opencdc.version": "v1", - "my_int": 123, - }, - "transaction": nil, - "ts_ms": float64(1674061777225), - }, - "schema": map[string]any{}, - }, - }, - }, - want: record.Record{ - Operation: record.OperationUpdate, - Metadata: map[string]string{ - "opencdc.readAt": "1674061777225000000", - "opencdc.version": "v1", - "my_int": "123", - "debezium.filter": `{"_id": {"$oid": "63e69d7f07908def1d0a2504"}}`, - "debezium.patch": `{"$v": 2,"diff": {"u": {"age": {"$numberLong": "80"},"name": "Some Person80"}}}`, - }, - Payload: record.Change{ - After: nil, - Before: nil, - }, - Key: record.StructuredData{"id": `{ "$oid" : "63e69d7f07908def1d0a2504"}`}, - }, - wantErr: false, - }, - { - name: "mongoDB debezium record update v2", - config: processor.Config{ - Settings: map[string]string{"format": "debezium"}, - }, - record: record.Record{ - Metadata: map[string]string{}, - Key: record.RawData{Raw: []byte(`{ "payload": { "id": "{ \"$oid\" : \"63ea773a3966740fe712036f\"}" }, "schema": { "fields": [ { "field": "id", "optional": false, "type": "string" } ], "name": "resource_7_390584.demo.user.Key", "optional": false, "type": "struct" } }`)}, 
- Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "payload": map[string]any{ - "before": nil, - "after": `{"_id": {"$oid": "63ea773a3966740fe712036f"},"name": "mickey mouse","phones": ["+1 222","+387 123"]}`, - "patch": nil, - "filter": nil, - "updateDescription": map[string]any{ - "removedFields": nil, - "updatedFields": `{"phones": ["+1 222", "+387 123"]}`, - "truncatedArrays": nil, - }, - "op": "u", - "source": map[string]any{}, - "transaction": nil, - "ts_ms": float64(1674061777225), - }, - "schema": map[string]any{}, - }, - }, - }, - want: record.Record{ - Operation: record.OperationUpdate, - Metadata: map[string]string{ - "opencdc.readAt": "1674061777225000000", - "debezium.updateDescription.updatedFields": `{"phones": ["+1 222", "+387 123"]}`, - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(`{"_id": {"$oid": "63ea773a3966740fe712036f"},"name": "mickey mouse","phones": ["+1 222","+387 123"]}`), - }, - }, - Key: record.StructuredData{"id": `{ "$oid" : "63ea773a3966740fe712036f"}`}, - }, - wantErr: false, - }, - { - name: "opencdc record create with structured data and no payload after", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key")}, - Operation: record.OperationCreate, - Metadata: map[string]string{}, - Payload: record.Change{ - Before: nil, - After: nil, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{}, - wantErr: true, - }, - { - name: "opencdc record create with an invalid operation", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - Operation: record.OperationCreate, - Metadata: map[string]string{ - 
"conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(`{ - "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "invalid", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1" - }, - "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - "payload": { - "after": { - "event_id": 1747353650, - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false - } - } - }`, - ), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{}, - wantErr: true, - }, - { - name: "opencdc record create with an invalid metadata", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - Operation: record.OperationCreate, - Metadata: map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(`{ - "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "create", - "metadata": "invalid", - "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - "payload": { - "after": { - "event_id": 1747353650, - "msg": "string 
0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false - } - } - }`, - ), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{}, - wantErr: true, - }, - { - name: "opencdc record create with an invalid key", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - Operation: record.OperationCreate, - Metadata: map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(`{ - "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "create", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1" - }, - "key": 1, - "payload": { - "after": { - "event_id": 1747353650, - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false - } - } - }`, - ), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{}, - wantErr: true, - }, - { - name: "opencdc record create with an invalid payload", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - Operation: record.OperationCreate, - Metadata: 
map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(`{ - "position": "NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0", - "operation": "create", - "metadata": { - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1" - }, - "key": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - }`, - ), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{}, - wantErr: true, - }, - { - name: "opencdc record create with structured data", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key")}, - Operation: record.OperationCreate, - Metadata: map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "position": []byte("NzgyNjJmODUtODNmMS00ZGQwLWEyZDAtNTRmNjA1ZjkyYTg0"), - "operation": record.OperationCreate, - "metadata": record.Metadata{ - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1", - }, - "key": map[string]interface{}{ - "id": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh", - }, - "payload": record.Change{ - Before: nil, - After: record.StructuredData{ - 
"event_id": 1747353650, - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false, - }, - }, - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{ - Operation: record.OperationCreate, - Metadata: record.Metadata{ - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "event_id": 1747353650, - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": 1250383582, - "triggered": false, - }, - }, - Key: record.StructuredData{"id": "MTc3NzQ5NDEtNTdhMi00MmZhLWI0MzAtODkxMmE5NDI0YjNh"}, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - wantErr: false, - }, - { - name: "opencdc record create with raw data", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - Operation: record.OperationCreate, - Metadata: map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(OpenCDCRecordCreatePayload), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: 
record.Record{ - Operation: record.OperationCreate, - Metadata: record.Metadata{ - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "event_id": float64(1747353650), - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": float64(1250383582), - "triggered": false, - }, - }, - Key: record.RawData{Raw: []byte("17774941-57a2-42fa-b430-8912a9424b3a")}, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - wantErr: false, - }, - { - name: "opencdc record delete with before and with raw data", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - Operation: record.OperationCreate, - Metadata: map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(OpenCDCRecordDeleteWithBeforePayload), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{ - Operation: record.OperationDelete, - Metadata: record.Metadata{ - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: record.StructuredData{ - "event_id": float64(1747353650), - "msg": "string 
0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": float64(1250383582), - "triggered": false, - }, - After: nil, - }, - Key: record.RawData{Raw: []uint8("17774941-57a2-42fa-b430-8912a9424b3a")}, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - wantErr: false, - }, - { - name: "opencdc record delete without before and with raw data", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - Operation: record.OperationCreate, - Metadata: map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(OpenCDCRecordDeleteWithoutBeforePayload), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{ - Operation: record.OperationDelete, - Metadata: record.Metadata{ - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: nil, - }, - Key: record.RawData{Raw: []uint8("17774941-57a2-42fa-b430-8912a9424b3a")}, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - wantErr: false, - }, - { - name: "opencdc record update with before and with raw data", - config: processor.Config{ - 
Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - Operation: record.OperationCreate, - Metadata: map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(OpenCDCRecordUpdateWithBeforePayload), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{ - Operation: record.OperationUpdate, - Metadata: record.Metadata{ - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: record.StructuredData{ - "event_id": float64(1747353650), - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": float64(1250383582), - "triggered": false, - }, - After: record.StructuredData{ - "event_id": float64(1.747353658e+09), - "msg": "string 0f5397c9-31f1-422a-9c9a-26e3574a5c31", - "pg_generator": false, - "sensor_id": float64(1.25038358e+09), - "triggered": false, - }, - }, - Key: record.RawData{Raw: []byte("17774941-57a2-42fa-b430-8912a9424b3a")}, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - wantErr: false, - }, - { - name: "opencdc record update without before and with raw data", - config: processor.Config{ - Settings: map[string]string{"format": "opencdc"}, - }, - record: record.Record{ - Key: record.RawData{Raw: []byte("one-key-raw-data")}, - 
Operation: record.OperationCreate, - Metadata: map[string]string{ - "conduit.source.connector.id": "dest-log-78lpnchx7tzpyqz:source-kafka", - "kafka.topic": "stream-78lpnchx7tzpyqz-generator", - "opencdc.createdAt": "1706028953595000000", - "opencdc.readAt": "1706028953606997000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte(OpenCDCRecordUpdateWithoutBeforePayload), - }, - }, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - want: record.Record{ - Operation: record.OperationUpdate, - Metadata: record.Metadata{ - "conduit.source.connector.id": "source-generator-78lpnchx7tzpyqz:source", - "opencdc.readAt": "1706028953595546000", - "opencdc.version": "v1", - }, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "event_id": float64(1747353650), - "msg": "string 0e8955b3-7fb5-4dda-8064-e10dc007f00d", - "pg_generator": false, - "sensor_id": float64(1250383582), - "triggered": false, - }, - }, - Key: record.RawData{Raw: []byte("17774941-57a2-42fa-b430-8912a9424b3a")}, - Position: []byte("eyJHcm91cElEIjoiNGQ2ZTBhMjktNzAwZi00Yjk4LWEzY2MtZWUyNzZhZTc4MjVjIiwiVG9waWMiOiJzdHJlYW0tNzhscG5jaHg3dHpweXF6LWdlbmVyYXRvciIsIlBhcnRpdGlvbiI6MCwiT2Zmc2V0IjoyMjF9"), - }, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := Unwrap(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.record) - if (err != nil) != tt.wantErr { - t.Fatalf("process() error = %v, wantErr = %v", err, tt.wantErr) - } - - if diff := cmp.Diff(tt.want, got); diff != "" { - t.Errorf("process() diff = %s", diff) - } - }) - } -} diff --git a/pkg/processor/procbuiltin/util.go b/pkg/processor/procbuiltin/util.go deleted file mode 100644 index f01536a55..000000000 --- 
a/pkg/processor/procbuiltin/util.go +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "strconv" - "time" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -var errEmptyConfigField = cerrors.New("empty config field") - -func getConfigFieldString(c processor.Config, field string) (string, error) { - val, ok := c.Settings[field] - if !ok || val == "" { - return "", cerrors.Errorf("failed to retrieve config field %q: %w", field, errEmptyConfigField) - } - return val, nil -} - -func getConfigFieldFloat64(c processor.Config, field string) (float64, error) { - raw, err := getConfigFieldString(c, field) - if err != nil { - return 0, err - } - - parsed, err := strconv.ParseFloat(raw, 64) - if err != nil { - return 0, cerrors.Errorf("failed to parse %q as float64: %w", field, err) - } - - return parsed, nil -} - -func getConfigFieldInt64(c processor.Config, field string) (int64, error) { - raw, err := getConfigFieldString(c, field) - if err != nil { - return 0, err - } - - parsed, err := strconv.ParseInt(raw, 10, 64) - if err != nil { - return 0, cerrors.Errorf("failed to parse %q as int64: %w", field, err) - } - - return parsed, nil -} - -func getConfigFieldDuration(c processor.Config, field string) (time.Duration, error) { - raw, err := getConfigFieldString(c, 
field) - if err != nil { - return 0, err - } - - parsed, err := time.ParseDuration(raw) - if err != nil { - return 0, cerrors.Errorf("failed to parse %q as time.Duration: %w", field, err) - } - - return parsed, nil -} - -// recordDataGetSetter is a utility that returns either the key or the payload -// data. It provides also a function to set the key or payload data. -// It is useful when writing 2 processors that do the same thing, except that -// one operates on the key and the other on the payload. -type recordDataGetSetter interface { - Get(record.Record) record.Data - Set(record.Record, record.Data) record.Record -} - -type recordPayloadGetSetter struct{} - -func (recordPayloadGetSetter) Get(r record.Record) record.Data { - // TODO what do we do with Payload.Before in a transform? - return r.Payload.After -} -func (recordPayloadGetSetter) Set(r record.Record, d record.Data) record.Record { - r.Payload.After = d - return r -} - -type recordKeyGetSetter struct{} - -func (recordKeyGetSetter) Get(r record.Record) record.Data { - return r.Key -} -func (recordKeyGetSetter) Set(r record.Record, d record.Data) record.Record { - r.Key = d - return r -} diff --git a/pkg/processor/procbuiltin/valuetokey.go b/pkg/processor/procbuiltin/valuetokey.go deleted file mode 100644 index db2e0fdb7..000000000 --- a/pkg/processor/procbuiltin/valuetokey.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procbuiltin - -import ( - "context" - "strings" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" -) - -const ( - valueToKeyProcType = "valuetokey" - valueToKeyConfigFields = "fields" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(valueToKeyProcType, ValueToKey) -} - -// ValueToKey builds a processor that replaces the record key with a new key -// formed from a subset of fields in the record value. -// - If Payload.After is structured, the created key will also be structured -// with a subset of fields. -// - If Payload.After is raw, return an error (not supported yet). -func ValueToKey(config processor.Config) (processor.Interface, error) { - if config.Settings[valueToKeyConfigFields] == "" { - return nil, cerrors.Errorf("%s: unspecified field %q", valueToKeyProcType, valueToKeyConfigFields) - } - - fields := strings.Split(config.Settings[valueToKeyConfigFields], ",") - - return NewFuncWrapper(func(_ context.Context, r record.Record) (_ record.Record, err error) { - defer func() { - if err != nil { - err = cerrors.Errorf("%s: %w", valueToKeyProcType, err) - } - }() - - switch d := r.Payload.After.(type) { - case record.StructuredData: - key := record.StructuredData{} - for _, f := range fields { - key[f] = d[f] - } - r.Key = key - return r, nil - case record.RawData: - return record.Record{}, cerrors.ErrNotImpl - default: - return record.Record{}, cerrors.Errorf("unexpected payload type %T", r.Payload) - } - }), nil -} diff --git a/pkg/processor/procbuiltin/valuetokey_test.go b/pkg/processor/procbuiltin/valuetokey_test.go deleted file mode 100644 index 5921d2634..000000000 --- a/pkg/processor/procbuiltin/valuetokey_test.go +++ /dev/null @@ -1,158 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package procbuiltin - -import ( - "context" - "reflect" - "testing" - - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/conduitio/conduit/pkg/record/schema/mock" - "github.com/matryer/is" -) - -func TestValueToKey_Build(t *testing.T) { - type args struct { - config processor.Config - } - tests := []struct { - name string - args args - wantErr bool - }{{ - name: "nil config returns error", - args: args{config: processor.Config{}}, - wantErr: true, - }, { - name: "empty config returns error", - args: args{config: processor.Config{ - Settings: map[string]string{}, - }}, - wantErr: true, - }, { - name: "empty field returns error", - args: args{config: processor.Config{ - Settings: map[string]string{valueToKeyConfigFields: ""}, - }}, - wantErr: true, - }, { - name: "non-empty field returns processor", - args: args{config: processor.Config{ - Settings: map[string]string{valueToKeyConfigFields: "foo"}, - }}, - wantErr: false, - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := ValueToKey(tt.args.config) - if (err != nil) != tt.wantErr { - t.Errorf("ValueToKey() error = %v, wantErr = %v", err, tt.wantErr) - return - } - }) - } -} - -func TestValueToKey_Process(t *testing.T) { - is := is.New(t) - - type args struct { - r record.Record - } - tests := []struct { - name string - config processor.Config - args args - want record.Record - wantErr bool - }{{ - name: "structured data", - config: processor.Config{ - Settings: map[string]string{valueToKeyConfigFields: "foo"}, - 
}, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": nil, - }, - }, - }}, - want: record.Record{ - Key: record.StructuredData{ - "foo": 123, - }, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": 123, - "bar": nil, - }, - }, - }, - wantErr: false, - }, { - name: "raw data without schema", - config: processor.Config{ - Settings: map[string]string{valueToKeyConfigFields: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: nil, - }, - }, - }}, - want: record.Record{}, - wantErr: true, - }, { - name: "raw data with schema", - config: processor.Config{ - Settings: map[string]string{valueToKeyConfigFields: "foo"}, - }, - args: args{r: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{ - Raw: []byte("raw data"), - Schema: mock.NewSchema(nil), - }, - }, - }}, - want: record.Record{}, - wantErr: true, // TODO not implemented - }} - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := ValueToKey(tt.config) - is.NoErr(err) - got, err := underTest.Process(context.Background(), tt.args.r) - if (err != nil) != tt.wantErr { - t.Errorf("process() error = %v, wantErr = %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("process() got = %v, want = %v", got, tt.want) - } - }) - } -} diff --git a/pkg/processor/processor_condition.go b/pkg/processor/processor_condition.go index 88baa0258..ec9bd3444 100644 --- a/pkg/processor/processor_condition.go +++ b/pkg/processor/processor_condition.go @@ -17,11 +17,12 @@ package processor import ( "bytes" "strconv" + "strings" "text/template" "github.com/Masterminds/sprig/v3" + "github.com/conduitio/conduit-commons/opencdc" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/record" ) // 
processorCondition parse go templates, Evaluate them for provided records, and return the boolean value of the output. @@ -32,6 +33,9 @@ type processorCondition struct { // newProcessorCondition parses and returns the template, returns an error if template parsing failed. func newProcessorCondition(condition string) (*processorCondition, error) { + if strings.Trim(condition, " ") == "" { + return nil, nil + } // parse template tmpl, err := template.New("").Funcs(sprig.FuncMap()).Parse(condition) if err != nil { @@ -45,7 +49,7 @@ func newProcessorCondition(condition string) (*processorCondition, error) { // Evaluate executes the template for the provided record, and parses the output into a boolean, returns an error // if output is not a boolean. -func (t *processorCondition) Evaluate(rec record.Record) (bool, error) { +func (t *processorCondition) Evaluate(rec opencdc.Record) (bool, error) { var b bytes.Buffer err := t.tmpl.Execute(&b, rec) if err != nil { diff --git a/pkg/processor/processor_condition_test.go b/pkg/processor/processor_condition_test.go index fb13f25c6..3bbbc2131 100644 --- a/pkg/processor/processor_condition_test.go +++ b/pkg/processor/processor_condition_test.go @@ -17,7 +17,7 @@ package processor import ( "testing" - "github.com/conduitio/conduit/pkg/record" + "github.com/conduitio/conduit-commons/opencdc" "github.com/matryer/is" ) @@ -32,9 +32,9 @@ func Test_ProcessorCondition_InvalidTemplate(t *testing.T) { func Test_ProcessorCondition_EvaluateTrue(t *testing.T) { is := is.New(t) condition := `{{ eq .Metadata.key "val" }}` - rec := record.Record{ - Position: record.Position("position-out"), - Metadata: record.Metadata{"key": "val"}, + rec := opencdc.Record{ + Position: opencdc.Position("position-out"), + Metadata: opencdc.Metadata{"key": "val"}, } tmpl, err := newProcessorCondition(condition) is.NoErr(err) @@ -46,9 +46,9 @@ func Test_ProcessorCondition_EvaluateTrue(t *testing.T) { func Test_ProcessorCondition_EvaluateFalse(t *testing.T) { is 
:= is.New(t) condition := `{{ eq .Metadata.key "wrongVal" }}` - rec := record.Record{ - Position: record.Position("position-out"), - Metadata: record.Metadata{"key": "val"}, + rec := opencdc.Record{ + Position: opencdc.Position("position-out"), + Metadata: opencdc.Metadata{"key": "val"}, } tmpl, err := newProcessorCondition(condition) is.NoErr(err) @@ -60,9 +60,9 @@ func Test_ProcessorCondition_EvaluateFalse(t *testing.T) { func Test_ProcessorCondition_NonBooleanOutput(t *testing.T) { is := is.New(t) condition := `{{ printf "hi" }}` - rec := record.Record{ - Position: record.Position("position-out"), - Metadata: record.Metadata{"key": "val"}, + rec := opencdc.Record{ + Position: opencdc.Position("position-out"), + Metadata: opencdc.Metadata{"key": "val"}, } tmpl, err := newProcessorCondition(condition) is.NoErr(err) diff --git a/pkg/processor/procjs/builder.go b/pkg/processor/procjs/builder.go deleted file mode 100644 index fb9b8d479..000000000 --- a/pkg/processor/procjs/builder.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procjs - -import ( - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/rs/zerolog" -) - -const ( - processorType = "js" - configScript = "script" -) - -func init() { - processor.GlobalBuilderRegistry.MustRegister(processorType, Builder) -} - -// Builder parses the config and if valid returns a JS processor, an error -// otherwise. It requires the config field "script". -func Builder(config processor.Config) (processor.Interface, error) { - if config.Settings[configScript] == "" { - return nil, cerrors.Errorf("%s: unspecified field %q", processorType, configScript) - } - - // TODO get logger from config or some other place - cw := zerolog.NewConsoleWriter() - cw.TimeFormat = "2006-01-02T15:04:05+00:00" - logger := zerolog.New(cw).With().Timestamp().Logger() - - p, err := New(config.Settings[configScript], logger) - if err != nil { - return nil, cerrors.Errorf("%s: %w", processorType, err) - } - - return p, nil -} diff --git a/pkg/processor/procjs/processor.go b/pkg/processor/procjs/processor.go deleted file mode 100644 index 5f2d2fd09..000000000 --- a/pkg/processor/procjs/processor.go +++ /dev/null @@ -1,279 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procjs - -import ( - "context" - "sync" - - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/inspector" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/dop251/goja" - "github.com/dop251/goja_nodejs/require" - "github.com/rs/zerolog" - - // register nodejs modules - _ "github.com/dop251/goja_nodejs/buffer" - _ "github.com/dop251/goja_nodejs/console" - _ "github.com/dop251/goja_nodejs/url" - _ "github.com/dop251/goja_nodejs/util" -) - -const ( - entrypoint = "process" -) - -// jsRecord is an intermediary representation of record.Record that is passed to -// the JavaScript transform. We use this because using record.Record would not -// allow us to modify or access certain data (e.g. metadata or structured data). -type jsRecord struct { - Position []byte - Operation string - Metadata map[string]string - Key any - Payload struct { - Before any - After any - } -} - -// Processor is able to run processors defined in JavaScript. -type Processor struct { - gojaPool sync.Pool - inInsp *inspector.Inspector - outInsp *inspector.Inspector -} - -// gojaContext represents one independent goja context. 
-type gojaContext struct { - runtime *goja.Runtime - function goja.Callable -} - -func New(src string, logger zerolog.Logger) (*Processor, error) { - p := &Processor{ - inInsp: inspector.New(log.New(logger), inspector.DefaultBufferSize), - outInsp: inspector.New(log.New(logger), inspector.DefaultBufferSize), - } - - var err error - runtime, err := p.newJSRuntime(logger) - if err != nil { - return nil, cerrors.Errorf("failed initializing JS runtime: %w", err) - } - - _, err = p.newFunction(runtime, src) - if err != nil { - return nil, cerrors.Errorf("failed initializing JS function: %w", err) - } - - p.gojaPool.New = func() any { - // create a new runtime for the function so it's executed in a separate goja context - rt, _ := p.newJSRuntime(logger) - f, _ := p.newFunction(rt, src) - return &gojaContext{ - runtime: rt, - function: f, - } - } - - return p, nil -} - -func (p *Processor) newJSRuntime(logger zerolog.Logger) (*goja.Runtime, error) { - rt := goja.New() - require.NewRegistry().Enable(rt) - - runtimeHelpers := map[string]interface{}{ - "logger": &logger, - "Record": p.jsRecord(rt), - "RawData": p.jsContentRaw(rt), - "StructuredData": p.jsContentStructured(rt), - } - - for name, helper := range runtimeHelpers { - if err := rt.Set(name, helper); err != nil { - return nil, cerrors.Errorf("failed to set helper %q: %w", name, err) - } - } - - return rt, nil -} - -func (p *Processor) newFunction(runtime *goja.Runtime, src string) (goja.Callable, error) { - prg, err := goja.Compile("", src, false) - if err != nil { - return nil, cerrors.Errorf("failed to compile script: %w", err) - } - - _, err = runtime.RunProgram(prg) - if err != nil { - return nil, cerrors.Errorf("failed to run program: %w", err) - } - - tmp := runtime.Get(entrypoint) - entrypointFunc, ok := goja.AssertFunction(tmp) - if !ok { - return nil, cerrors.Errorf("failed to get entrypoint function %q", entrypoint) - } - - return entrypointFunc, nil -} - -func (p *Processor) jsRecord(runtime 
*goja.Runtime) func(goja.ConstructorCall) *goja.Object { - return func(call goja.ConstructorCall) *goja.Object { - // TODO accept arguments - // We return a jsRecord struct, however because we are - // not changing call.This instanceof will not work as expected. - - r := jsRecord{ - Metadata: make(map[string]string), - } - // We need to return a pointer to make the returned object mutable. - return runtime.ToValue(&r).ToObject(runtime) - } -} - -func (p *Processor) jsContentRaw(runtime *goja.Runtime) func(goja.ConstructorCall) *goja.Object { - return func(call goja.ConstructorCall) *goja.Object { - // TODO accept arguments - // We return a record.RawData struct, however because we are - // not changing call.This instanceof will not work as expected. - - r := record.RawData{} - // We need to return a pointer to make the returned object mutable. - return runtime.ToValue(&r).ToObject(runtime) - } -} - -func (p *Processor) jsContentStructured(runtime *goja.Runtime) func(goja.ConstructorCall) *goja.Object { - return func(call goja.ConstructorCall) *goja.Object { - // TODO accept arguments - // We return a map[string]interface{} struct, however because we are - // not changing call.This instanceof will not work as expected. 
- - r := make(map[string]interface{}) - return runtime.ToValue(r).ToObject(runtime) - } -} - -func (p *Processor) Process(ctx context.Context, in record.Record) (record.Record, error) { - p.inInsp.Send(ctx, in) - - g := p.gojaPool.Get().(*gojaContext) - defer p.gojaPool.Put(g) - - jsr := p.toJSRecord(g.runtime, in) - - result, err := g.function(goja.Undefined(), jsr) - if err != nil { - return record.Record{}, cerrors.Errorf("failed to execute JS processor function: %w", err) - } - - out, err := p.toInternalRecord(result) - if err == processor.ErrSkipRecord { - return record.Record{}, err - } - if err != nil { - return record.Record{}, cerrors.Errorf("failed to transform to internal record: %w", err) - } - - p.outInsp.Send(ctx, out) - return out, nil -} - -func (p *Processor) InspectIn(ctx context.Context, id string) *inspector.Session { - return p.inInsp.NewSession(ctx, id) -} - -func (p *Processor) InspectOut(ctx context.Context, id string) *inspector.Session { - return p.outInsp.NewSession(ctx, id) -} - -func (p *Processor) Close() { - p.inInsp.Close() - p.outInsp.Close() -} - -func (p *Processor) toJSRecord(runtime *goja.Runtime, r record.Record) goja.Value { - convertData := func(d record.Data) interface{} { - switch v := d.(type) { - case record.RawData: - return &v - case record.StructuredData: - return map[string]interface{}(v) - } - return nil - } - - jsr := jsRecord{ - Position: r.Position, - Operation: r.Operation.String(), - Metadata: r.Metadata, - Key: convertData(r.Key), - Payload: struct { - Before interface{} - After interface{} - }{ - Before: convertData(r.Payload.Before), - After: convertData(r.Payload.After), - }, - } - - // we need to send in a pointer to let the user change the value and return it, if they choose to do so - return runtime.ToValue(&jsr) -} - -func (p *Processor) toInternalRecord(v goja.Value) (record.Record, error) { - raw := v.Export() - if raw == nil { - return record.Record{}, processor.ErrSkipRecord - } - - jsr, ok := 
v.Export().(*jsRecord) - if !ok { - return record.Record{}, cerrors.Errorf("js function expected to return %T, but returned: %T", &jsRecord{}, v) - } - - var op record.Operation - err := op.UnmarshalText([]byte(jsr.Operation)) - if err != nil { - return record.Record{}, cerrors.Errorf("could not unmarshal operation: %w", err) - } - - convertData := func(d interface{}) record.Data { - switch v := d.(type) { - case *record.RawData: - return *v - case map[string]interface{}: - return record.StructuredData(v) - } - return nil - } - - return record.Record{ - Position: jsr.Position, - Operation: op, - Metadata: jsr.Metadata, - Key: convertData(jsr.Key), - Payload: record.Change{ - Before: convertData(jsr.Payload.Before), - After: convertData(jsr.Payload.After), - }, - }, nil -} diff --git a/pkg/processor/procjs/processor_test.go b/pkg/processor/procjs/processor_test.go deleted file mode 100644 index 0e5cdb86b..000000000 --- a/pkg/processor/procjs/processor_test.go +++ /dev/null @@ -1,528 +0,0 @@ -// Copyright © 2022 Meroxa, Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package procjs - -import ( - "bytes" - "context" - "reflect" - "testing" - "time" - - "github.com/conduitio/conduit/pkg/foundation/cchan" - "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/record" - "github.com/dop251/goja" - "github.com/matryer/is" - "github.com/rs/zerolog" -) - -func TestJSProcessor_Logger(t *testing.T) { - is := is.New(t) - - var buf bytes.Buffer - logger := zerolog.New(&buf) - underTest, err := New(` - function process(r) { - logger.Info().Msg("Hello"); - return r - } - `, logger) - is.NoErr(err) // expected no error when creating the JS processor - - _, err = underTest.Process(context.Background(), record.Record{}) - is.NoErr(err) // expected no error when processing record - - is.Equal(`{"level":"info","message":"Hello"}`+"\n", buf.String()) // expected different log message -} - -func TestJSProcessor_MissingEntrypoint(t *testing.T) { - is := is.New(t) - - underTest, err := New( - `logger.Debug("no entrypoint");`, - zerolog.Nop(), - ) - - is.True(err != nil) // expected error - is.Equal(`failed initializing JS function: failed to get entrypoint function "process"`, err.Error()) // expected different error message - is.True(underTest == nil) -} - -func TestJSProcessor_Process(t *testing.T) { - type fields struct { - src string - } - type args struct { - record record.Record - } - tests := []struct { - name string - fields fields - args args - want record.Record - wantErr error - }{ - { - name: "change fields of structured record", - fields: fields{ - src: ` - function process(record) { - record.Position = "3"; - record.Operation = "update"; - record.Metadata["returned"] = "JS"; - record.Key.Raw = "baz"; - record.Payload.After["ccc"] = "baz"; - return record; - }`, - }, - args: args{ - record: record.Record{ - Position: []byte("2"), - Operation: record.OperationCreate, - Metadata: record.Metadata{"existing": "val"}, - Key: record.RawData{Raw: 
[]byte("bar")}, - Payload: record.Change{ - Before: nil, - After: record.StructuredData( - map[string]interface{}{ - "aaa": 111, - "bbb": []string{"foo", "bar"}, - }, - ), - }, - }, - }, - want: record.Record{ - Position: []byte("3"), - Operation: record.OperationUpdate, - Metadata: record.Metadata{"existing": "val", "returned": "JS"}, - Key: record.RawData{Raw: []byte("baz")}, - Payload: record.Change{ - Before: nil, - After: record.StructuredData( - map[string]interface{}{ - "aaa": 111, - "bbb": []string{"foo", "bar"}, - "ccc": "baz", - }, - ), - }, - }, - wantErr: nil, - }, - { - name: "complete change incoming record with structured data", - fields: fields{ - src: ` - function process(record) { - record.Position = "3"; - record.Metadata["returned"] = "JS"; - record.Key.Raw = "baz"; - record.Payload.After = new StructuredData(); - record.Payload.After["foo"] = "bar"; - return record; - }`, - }, - args: args{ - record: record.Record{ - Position: []byte("2"), - Metadata: record.Metadata{"existing": "val"}, - Key: record.RawData{Raw: []byte("bar")}, - Payload: record.Change{ - Before: nil, - After: record.RawData{Raw: []byte("foo")}, - }, - }, - }, - want: record.Record{ - Position: []byte("3"), - Metadata: record.Metadata{"existing": "val", "returned": "JS"}, - Key: record.RawData{Raw: []byte("baz")}, - Payload: record.Change{ - Before: nil, - After: record.StructuredData{ - "foo": "bar", - }, - }, - }, - wantErr: nil, - }, - { - name: "complete change incoming record with raw data", - fields: fields{ - src: ` - function process(record) { - record.Position = "3"; - record.Metadata["returned"] = "JS"; - record.Key.Raw = "baz"; - record.Payload.After.Raw = String.fromCharCode.apply(String, record.Payload.After.Raw) + "bar"; - return record; - }`, - }, - args: args{ - record: record.Record{ - Position: []byte("2"), - Metadata: record.Metadata{"existing": "val"}, - Key: record.RawData{Raw: []byte("bar")}, - Payload: record.Change{ - Before: nil, - After: 
record.RawData{Raw: []byte("foo")}, - }, - }, - }, - want: record.Record{ - Position: []byte("3"), - Metadata: record.Metadata{"existing": "val", "returned": "JS"}, - Key: record.RawData{Raw: []byte("baz")}, - Payload: record.Change{ - Before: nil, - After: record.RawData{Raw: []byte("foobar")}, - }, - }, - wantErr: nil, - }, - { - name: "return new record with raw data", - fields: fields{ - src: ` - function process(record) { - r = new Record(); - r.Position = "3"; - r.Metadata["returned"] = "JS"; - r.Key = new RawData(); - r.Key.Raw = "baz"; - r.Payload.After = new RawData(); - r.Payload.After.Raw = "foobar"; - return r; - }`, - }, - args: args{ - record: record.Record{}, - }, - want: record.Record{ - Position: []byte("3"), - Metadata: record.Metadata{"returned": "JS"}, - Key: record.RawData{Raw: []byte("baz")}, - Payload: record.Change{ - Before: nil, - After: record.RawData{Raw: []byte("foobar")}, - }, - }, - wantErr: nil, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := New(tt.fields.src, zerolog.Nop()) - is.NoErr(err) // expected no error when creating the JS processor - - got, err := underTest.Process(context.Background(), tt.args.record) - if tt.wantErr != nil { - is.Equal(tt.wantErr, err) // expected different error - } else { - is.NoErr(err) // expected no error - } - - is.Equal(tt.want, got) // expected different record - }) - } -} - -func TestJSProcessor_Filtering(t *testing.T) { - testCases := []struct { - name string - src string - input record.Record - filter bool - }{ - { - name: "always skip", - src: `function process(r) { - return null; - }`, - input: record.Record{}, - filter: false, - }, - { - name: "filter based on a field - positive", - src: `function process(r) { - if (r.Metadata["keepme"] != undefined) { - return r - } - return null; - }`, - input: record.Record{Metadata: record.Metadata{"keepme": "yes"}}, - filter: true, - }, - { - name: "filter out based on a field - 
negative", - src: `function process(r) { - if (r.Metadata["keepme"] != undefined) { - return r - } - return null; - }`, - input: record.Record{Metadata: record.Metadata{"foo": "bar"}}, - filter: false, - }, - { - name: "no return value", - src: ` - function process(record) { - logger.Debug("no return value"); - }`, - input: record.Record{Metadata: record.Metadata{"foo": "bar"}}, - filter: false, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := New(tc.src, zerolog.New(zerolog.NewConsoleWriter())) - is.NoErr(err) // expected no error when creating the JS processor - - rec, err := underTest.Process(context.Background(), tc.input) - if tc.filter { - is.NoErr(err) // expected no error for processed record - is.Equal(tc.input, rec) // expected different processed record - } else { - is.True(reflect.ValueOf(rec).IsZero()) // expected zero record - is.True(cerrors.Is(err, processor.ErrSkipRecord)) // expected ErrSkipRecord - } - }) - } -} - -func TestJSProcessor_DataTypes(t *testing.T) { - testCases := []struct { - name string - src string - input record.Record - want record.Record - }{ - { - name: "position from string", - src: `function process(record) { - record.Position = "foobar"; - return record; - }`, - input: record.Record{}, - want: record.Record{ - Position: record.Position("foobar"), - }, - }, - { - name: "raw payload, data from string", - src: `function process(record) { - record.Payload.After = new RawData(); - record.Payload.After.Raw = "foobar"; - return record; - }`, - input: record.Record{}, - want: record.Record{ - Payload: record.Change{ - Before: nil, - After: record.RawData{Raw: []byte("foobar")}, - }, - }, - }, - { - name: "raw key, data from string", - src: `function process(record) { - record.Key = new RawData(); - record.Key.Raw = "foobar"; - return record; - }`, - input: record.Record{}, - want: record.Record{ - Key: record.RawData{Raw: []byte("foobar")}, - }, - }, 
- { - name: "update metadata", - src: `function process(record) { - record.Metadata["new_key"] = "new_value" - delete record.Metadata.remove_me; - return record; - }`, - input: record.Record{ - Metadata: record.Metadata{ - "old_key": "old_value", - "remove_me": "remove_me", - }, - }, - want: record.Record{ - Metadata: record.Metadata{ - "old_key": "old_value", - "new_key": "new_value", - }, - }, - }, - } - - for _, tc := range testCases { - tc := tc - t.Run(tc.name, func(t *testing.T) { - is := is.New(t) - - underTest, err := New(tc.src, zerolog.Nop()) - is.NoErr(err) // expected no error when creating the JS processor - - got, err := underTest.Process(context.Background(), tc.input) - is.NoErr(err) // expected no error when processing record - is.Equal(tc.want, got) // expected different record - }) - } -} - -func TestJSProcessor_Inspect(t *testing.T) { - is := is.New(t) - ctx := context.Background() - src := ` - function process(record) { - record.Key = new RawData(); - record.Key.Raw = "foobar"; - return record; - }` - underTest, err := New(src, zerolog.Nop()) - is.NoErr(err) // expected no error when creating the JS processor - - in := underTest.InspectIn(ctx, "test-id") - out := underTest.InspectOut(ctx, "test-id") - - recIn := record.Record{ - Position: record.Position("test-pos"), - Operation: record.OperationUpdate, - Metadata: record.Metadata{"test": "true"}, - Key: record.RawData{Raw: []byte("test-key")}, - Payload: record.Change{}, - } - recOut, err := underTest.Process(ctx, recIn) - is.NoErr(err) - - inspIn, got, err := cchan.ChanOut[record.Record](in.C).RecvTimeout(ctx, 100*time.Millisecond) - is.NoErr(err) - is.True(got) - is.Equal(recIn, inspIn) - - inspOut, got, err := cchan.ChanOut[record.Record](out.C).RecvTimeout(ctx, 100*time.Millisecond) - is.NoErr(err) - is.True(got) - is.Equal(recOut, inspOut) -} - -func TestJSProcessor_Close(t *testing.T) { - is := is.New(t) - ctx := context.Background() - src := ` - function process(record) { - record.Key = 
new RawData(); - record.Key.Raw = "foobar"; - return record; - }` - underTest, err := New(src, zerolog.Nop()) - is.NoErr(err) // expected no error when creating the JS processor - - in := underTest.InspectIn(ctx, "test-id") - out := underTest.InspectOut(ctx, "test-id") - underTest.Close() - - // incoming records session should be closed - _, got, err := cchan.ChanOut[record.Record](in.C).RecvTimeout(ctx, 100*time.Millisecond) - is.NoErr(err) - is.True(!got) - - // outgoing records session should be closed - _, got, err = cchan.ChanOut[record.Record](out.C).RecvTimeout(ctx, 100*time.Millisecond) - is.NoErr(err) - is.True(!got) -} - -func TestJSProcessor_JavaScriptException(t *testing.T) { - is := is.New(t) - - src := `function process(record) { - var m; - m.test - }` - underTest, err := New(src, zerolog.Nop()) - is.NoErr(err) // expected no error when creating the JS processor - - r := record.Record{ - Key: record.RawData{Raw: []byte("test key")}, - Payload: record.Change{ - Before: nil, - After: record.RawData{Raw: []byte("test payload")}, - }, - } - - got, err := underTest.Process(context.Background(), r) - is.True(err != nil) // expected error - target := &goja.Exception{} - is.True(cerrors.As(err, &target)) // expected a goja.Exception - is.Equal(record.Record{}, got) // expected a zero record -} - -func TestJSProcessor_BrokenJSCode(t *testing.T) { - is := is.New(t) - - src := `function {` - _, err := New(src, zerolog.Nop()) - is.True(err != nil) // expected error for invalid JS code - target := &goja.CompilerSyntaxError{} - is.True(cerrors.As(err, &target)) // expected a goja.CompilerSyntaxError -} - -func TestJSProcessor_ScriptWithMultipleFunctions(t *testing.T) { - is := is.New(t) - - src := ` - function getValue() { - return "updated_value"; - } - - function process(record) { - record.Metadata["updated_key"] = getValue() - return record; - } - ` - underTest, err := New(src, zerolog.Nop()) - is.NoErr(err) // expected no error when creating the JS processor - 
- r := record.Record{ - Metadata: record.Metadata{ - "old_key": "old_value", - }, - } - - got, err := underTest.Process(context.Background(), r) - is.NoErr(err) // expected no error when processing record - is.Equal( - record.Record{ - Metadata: record.Metadata{ - "old_key": "old_value", - "updated_key": "updated_value", - }, - }, - got, - ) // expected different record -} diff --git a/pkg/processor/runnable_processor.go b/pkg/processor/runnable_processor.go new file mode 100644 index 000000000..5034fb5a0 --- /dev/null +++ b/pkg/processor/runnable_processor.go @@ -0,0 +1,147 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package processor + +import ( + "context" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/record" +) + +// RunnableProcessor is a stream.Processor which has been +// initialized and is ready to be used in a pipeline. 
+type RunnableProcessor struct { + *Instance + proc sdk.Processor + cond *processorCondition +} + +func newRunnableProcessor( + proc sdk.Processor, + cond *processorCondition, + i *Instance, +) *RunnableProcessor { + return &RunnableProcessor{ + Instance: i, + proc: proc, + cond: cond, + } +} + +func (p *RunnableProcessor) Open(ctx context.Context) error { + err := p.proc.Configure(ctx, p.Config.Settings) + if err != nil { + return cerrors.Errorf("failed configuring processor: %w", err) + } + + err = p.proc.Open(ctx) + if err != nil { + return cerrors.Errorf("failed opening processor: %w", err) + } + + return nil +} + +func (p *RunnableProcessor) Process(ctx context.Context, records []opencdc.Record) []sdk.ProcessedRecord { + for _, inRec := range records { + p.inInsp.Send(ctx, record.FromOpenCDC(inRec)) + } + + var outRecs []sdk.ProcessedRecord + if p.cond == nil { + outRecs = p.proc.Process(ctx, records) + } else { + // We need to first evaluate condition for each record. + + // TODO reuse these slices or at least use a pool + // keptRecords are records that will be sent to the processor + keptRecords := make([]opencdc.Record, 0, len(records)) + // passthroughRecordIndexes are indexes of records that are just passed + // through to the other side. 
+ passthroughRecordIndexes := make([]int, 0, len(records)) + + var err error + + for i, rec := range records { + var keep bool + keep, err = p.cond.Evaluate(rec) + if err != nil { + err = cerrors.Errorf("failed evaluating condition: %w", err) + break + } + + if keep { + keptRecords = append(keptRecords, rec) + } else { + passthroughRecordIndexes = append(passthroughRecordIndexes, i) + } + } + + if len(keptRecords) > 0 { + outRecs = p.proc.Process(ctx, keptRecords) + if len(outRecs) > len(keptRecords) { + return []sdk.ProcessedRecord{ + sdk.ErrorRecord{Error: cerrors.New("processor returned more records than input")}, + } + } + } + if err != nil { + outRecs = append(outRecs, sdk.ErrorRecord{Error: err}) + } + + // Add passthrough records back into the resultset and keep the + // original order of the records. + if len(passthroughRecordIndexes) == len(records) { + // Optimization for the case where no records are kept + outRecs = make([]sdk.ProcessedRecord, len(records)) + for i, rec := range records { + outRecs[i] = sdk.SingleRecord(rec) + } + } else if len(passthroughRecordIndexes) > 0 { + tmp := make([]sdk.ProcessedRecord, len(outRecs)+len(passthroughRecordIndexes)) + prevIndex := -1 + for i, index := range passthroughRecordIndexes { + // TODO index-i can be out of bounds if the processor returns + // fewer records than the input. 
+ copy(tmp[prevIndex+1:index], outRecs[prevIndex-i+1:index-i]) + tmp[index] = sdk.SingleRecord(records[index]) + prevIndex = index + } + // if the last index is not the last record, copy the rest + if passthroughRecordIndexes[len(passthroughRecordIndexes)-1] != len(tmp)-1 { + copy(tmp[prevIndex+1:], outRecs[prevIndex-len(passthroughRecordIndexes)+1:]) + } + outRecs = tmp + } + } + + for _, outRec := range outRecs { + singleRec, ok := outRec.(sdk.SingleRecord) + if ok { + p.outInsp.Send(ctx, record.FromOpenCDC(opencdc.Record(singleRec))) + } + } + + return outRecs +} + +func (p *RunnableProcessor) Teardown(ctx context.Context) error { + err := p.proc.Teardown(ctx) + p.running = false + return err +} diff --git a/pkg/processor/runnable_processor_test.go b/pkg/processor/runnable_processor_test.go new file mode 100644 index 000000000..5b057513f --- /dev/null +++ b/pkg/processor/runnable_processor_test.go @@ -0,0 +1,384 @@ +// Copyright © 2024 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package processor + +import ( + "context" + "testing" + "time" + + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cchan" + "github.com/conduitio/conduit/pkg/foundation/cerrors" + "github.com/conduitio/conduit/pkg/foundation/log" + "github.com/conduitio/conduit/pkg/inspector" + "github.com/conduitio/conduit/pkg/plugin/processor/mock" + "github.com/conduitio/conduit/pkg/record" + "github.com/matryer/is" + "go.uber.org/mock/gomock" +) + +func TestRunnableProcessor_Open(t *testing.T) { + ctx := context.Background() + inst := &Instance{ + Config: Config{ + Settings: map[string]string{ + "foo": "bar", + }, + Workers: 123, + }, + } + + testCases := []struct { + name string + cfgErr error + openErr error + wantErr error + }{ + { + name: "success", + cfgErr: nil, + openErr: nil, + wantErr: nil, + }, + { + name: "configuration error", + cfgErr: cerrors.New("config is wrong"), + openErr: nil, + wantErr: cerrors.New("failed configuring processor: config is wrong"), + }, + { + name: "open error", + cfgErr: nil, + openErr: cerrors.New("open method exploded"), + wantErr: cerrors.New("failed opening processor: open method exploded"), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + proc := mock.NewProcessor(gomock.NewController(t)) + proc.EXPECT(). + Configure(gomock.Any(), inst.Config.Settings). + Return(tc.cfgErr) + // If there was a configuration error, + // then Open() should not be called. 
+ if tc.cfgErr == nil { + proc.EXPECT().Open(gomock.Any()).Return(tc.openErr) + } + + underTest := newRunnableProcessor(proc, nil, inst) + err := underTest.Open(ctx) + if tc.wantErr == nil { + is.NoErr(err) + } else { + is.Equal(tc.wantErr.Error(), err.Error()) + } + }) + } +} + +func TestRunnableProcessor_Teardown(t *testing.T) { + ctx := context.Background() + inst := &Instance{ + Config: Config{ + Settings: map[string]string{ + "foo": "bar", + }, + Workers: 123, + }, + } + + testCases := []struct { + name string + teardownErr error + wantErr error + }{ + { + name: "no error", + teardownErr: nil, + wantErr: nil, + }, + { + name: "with error", + teardownErr: cerrors.New("boom!"), + wantErr: cerrors.New("boom!"), + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + proc := mock.NewProcessor(gomock.NewController(t)) + proc.EXPECT().Teardown(gomock.Any()).Return(tc.teardownErr) + + underTest := newRunnableProcessor(proc, nil, inst) + err := underTest.Teardown(ctx) + if tc.wantErr == nil { + is.NoErr(err) + } else { + is.Equal(tc.wantErr.Error(), err.Error()) + } + }) + } +} + +func TestRunnableProcessor_ProcessedRecordsInspected(t *testing.T) { + is := is.New(t) + ctx := context.Background() + inst := newTestInstance() + recsIn := []opencdc.Record{ + { + Key: opencdc.RawData("test key in"), + }, + } + recsOut := []sdk.ProcessedRecord{ + sdk.SingleRecord{ + Key: opencdc.RawData("test key out"), + }, + } + proc := mock.NewProcessor(gomock.NewController(t)) + proc.EXPECT().Process(gomock.Any(), recsIn).Return(recsOut) + + underTest := newRunnableProcessor(proc, nil, inst) + inSession := underTest.inInsp.NewSession(ctx, "id-in") + outSession := underTest.outInsp.NewSession(ctx, "id-out") + + _ = underTest.Process(ctx, recsIn) + defer underTest.Close() + + rec, gotRec, err := cchan.ChanOut[record.Record](inSession.C).RecvTimeout(ctx, 100*time.Millisecond) + is.True(gotRec) + is.NoErr(err) + 
is.Equal(record.FromOpenCDC(recsIn[0]), rec) + + rec, gotRec, err = cchan.ChanOut[record.Record](outSession.C).RecvTimeout(ctx, 100*time.Millisecond) + is.True(gotRec) + is.NoErr(err) + is.Equal(recsOut[0], sdk.SingleRecord(rec.ToOpenCDC())) +} + +func TestRunnableProcessor_FilteredRecordsNotInspected(t *testing.T) { + is := is.New(t) + ctx := context.Background() + inst := newTestInstance() + recsIn := []opencdc.Record{ + { + Key: opencdc.RawData("test key in"), + }, + } + + proc := mock.NewProcessor(gomock.NewController(t)) + proc.EXPECT().Process(gomock.Any(), recsIn).Return([]sdk.ProcessedRecord{sdk.FilterRecord{}}) + + underTest := newRunnableProcessor(proc, nil, inst) + inSession := underTest.inInsp.NewSession(ctx, "id-in") + outSession := underTest.outInsp.NewSession(ctx, "id-out") + + _ = underTest.Process(ctx, recsIn) + defer underTest.Close() + + rec, gotRec, err := cchan.ChanOut[record.Record](inSession.C).RecvTimeout(ctx, 100*time.Millisecond) + is.True(gotRec) + is.NoErr(err) + is.Equal(record.FromOpenCDC(recsIn[0]), rec) + + _, gotRec, err = cchan.ChanOut[record.Record](outSession.C).RecvTimeout(ctx, 100*time.Millisecond) + is.True(!gotRec) + is.True(cerrors.Is(err, context.DeadlineExceeded)) +} + +func TestRunnableProcessor_ErrorRecordsNotInspected(t *testing.T) { + is := is.New(t) + ctx := context.Background() + inst := newTestInstance() + recsIn := []opencdc.Record{ + { + Key: opencdc.RawData("test key in"), + }, + } + + proc := mock.NewProcessor(gomock.NewController(t)) + proc.EXPECT().Process(gomock.Any(), recsIn).Return([]sdk.ProcessedRecord{sdk.ErrorRecord{}}) + + underTest := newRunnableProcessor(proc, nil, inst) + inSession := underTest.inInsp.NewSession(ctx, "id-in") + outSession := underTest.outInsp.NewSession(ctx, "id-out") + + _ = underTest.Process(ctx, recsIn) + defer underTest.Close() + + rec, gotRec, err := cchan.ChanOut[record.Record](inSession.C).RecvTimeout(ctx, 100*time.Millisecond) + is.True(gotRec) + is.NoErr(err) + 
is.Equal(record.FromOpenCDC(recsIn[0]), rec) + + _, gotRec, err = cchan.ChanOut[record.Record](outSession.C).RecvTimeout(ctx, 100*time.Millisecond) + is.True(!gotRec) + is.True(cerrors.Is(err, context.DeadlineExceeded)) +} + +func TestRunnableProcessor_Process_ConditionNotMatching(t *testing.T) { + is := is.New(t) + ctx := context.Background() + inst := newTestInstance() + recsIn := []opencdc.Record{ + { + Metadata: opencdc.Metadata{"key": "something"}, + }, + } + + proc := mock.NewProcessor(gomock.NewController(t)) + + condition, err := newProcessorCondition(`{{ eq .Metadata.key "val" }}`) + is.NoErr(err) + underTest := newRunnableProcessor( + proc, + condition, + inst, + ) + + recsOut := underTest.Process(ctx, recsIn) + defer underTest.Close() + + is.Equal([]sdk.ProcessedRecord{sdk.SingleRecord(recsIn[0])}, recsOut) +} + +func TestRunnableProcessor_Process_ConditionMatching(t *testing.T) { + is := is.New(t) + ctx := context.Background() + inst := newTestInstance() + recsIn := []opencdc.Record{ + { + Metadata: opencdc.Metadata{"key": "val"}, + }, + } + + wantRecs := []sdk.ProcessedRecord{sdk.SingleRecord{Key: opencdc.RawData(`a key`)}} + proc := mock.NewProcessor(gomock.NewController(t)) + proc.EXPECT().Process(ctx, recsIn).Return(wantRecs) + + condition, err := newProcessorCondition(`{{ eq .Metadata.key "val" }}`) + is.NoErr(err) + underTest := newRunnableProcessor( + proc, + condition, + inst, + ) + + gotRecs := underTest.Process(ctx, recsIn) + defer underTest.Close() + + is.Equal(wantRecs, gotRecs) +} + +func TestRunnableProcessor_Process_ConditionError(t *testing.T) { + is := is.New(t) + ctx := context.Background() + inst := newTestInstance() + recsIn := []opencdc.Record{ + { + Metadata: opencdc.Metadata{"key": "val"}, + }, + } + + proc := mock.NewProcessor(gomock.NewController(t)) + + condition, err := newProcessorCondition("junk") + is.NoErr(err) + underTest := newRunnableProcessor( + proc, + condition, + inst, + ) + + gotRecs := underTest.Process(ctx, 
recsIn) + defer underTest.Close() + + is.Equal(1, len(gotRecs)) + gotRec, gotErr := gotRecs[0].(sdk.ErrorRecord) + is.True(gotErr) + is.Equal( + "failed evaluating condition: error converting the condition go-template output to boolean, "+ + "strconv.ParseBool: parsing \"junk\": invalid syntax: strconv.ParseBool: parsing \"junk\": "+ + "invalid syntax", + gotRec.Error.Error(), + ) +} + +func TestRunnableProcessor_Process_Batch(t *testing.T) { + is := is.New(t) + ctx := context.Background() + inst := newTestInstance() + recsIn := []opencdc.Record{ + {Metadata: opencdc.Metadata{"key": "no", "rec": "1"}}, + {Metadata: opencdc.Metadata{"key": "yes", "rec": "2"}}, + {Metadata: opencdc.Metadata{"key": "no", "rec": "3"}}, + {Metadata: opencdc.Metadata{"key": "no", "rec": "4"}}, + {Metadata: opencdc.Metadata{"key": "yes", "rec": "5"}}, + {Metadata: opencdc.Metadata{"key": "no", "rec": "6"}}, + {Metadata: opencdc.Metadata{"key": "yes", "rec": "7"}}, + } + + wantRecs := []sdk.ProcessedRecord{ + sdk.SingleRecord(recsIn[0]), + sdk.SingleRecord{Metadata: opencdc.Metadata{"key": "yes", "rec": "2", "processed": "true"}}, + sdk.SingleRecord(recsIn[2]), + sdk.SingleRecord(recsIn[3]), + sdk.SingleRecord{Metadata: opencdc.Metadata{"key": "yes", "rec": "5", "processed": "true"}}, + sdk.SingleRecord(recsIn[5]), + sdk.SingleRecord{Metadata: opencdc.Metadata{"key": "yes", "rec": "7", "processed": "true"}}, + } + proc := mock.NewProcessor(gomock.NewController(t)) + proc.EXPECT().Process(ctx, []opencdc.Record{recsIn[1], recsIn[4], recsIn[6]}).DoAndReturn(func(_ context.Context, recs []opencdc.Record) []sdk.ProcessedRecord { + out := make([]sdk.ProcessedRecord, 0, len(recs)) + for _, rec := range recs { + rec.Metadata["processed"] = "true" + out = append(out, sdk.SingleRecord{Metadata: rec.Metadata}) + } + return out + }) + + condition, err := newProcessorCondition(`{{ eq .Metadata.key "yes" }}`) + is.NoErr(err) + underTest := newRunnableProcessor( + proc, + condition, + inst, + ) + + 
gotRecs := underTest.Process(ctx, recsIn) + defer underTest.Close() + + is.Equal(wantRecs, gotRecs) +} + +func newTestInstance() *Instance { + return &Instance{ + Config: Config{ + Settings: map[string]string{ + "foo": "bar", + }, + Workers: 123, + }, + + inInsp: inspector.New(log.Nop(), inspector.DefaultBufferSize), + outInsp: inspector.New(log.Nop(), inspector.DefaultBufferSize), + } +} diff --git a/pkg/processor/service.go b/pkg/processor/service.go index d9a60df7b..8c50a7638 100644 --- a/pkg/processor/service.go +++ b/pkg/processor/service.go @@ -12,33 +12,40 @@ // See the License for the specific language governing permissions and // limitations under the License. +//go:generate mockgen -destination=mock/plugin_service.go -package=mock -mock_names=PluginService=PluginService . PluginService + package processor import ( "context" "time" + sdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/database" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/foundation/metrics/measure" ) +type PluginService interface { + NewProcessor(ctx context.Context, pluginName string, id string) (sdk.Processor, error) +} + type Service struct { logger log.CtxLogger - registry *BuilderRegistry + registry PluginService instances map[string]*Instance store *Store } -// NewService creates a new processor service. -func NewService(logger log.CtxLogger, db database.DB, registry *BuilderRegistry) *Service { +// NewService creates a new processor plugin service. 
+func NewService(logger log.CtxLogger, db database.DB, registry PluginService) *Service { return &Service{ logger: logger.WithComponent("processor.Service"), registry: registry, instances: make(map[string]*Instance), - store: NewStore(db, registry), + store: NewStore(db), } } @@ -54,7 +61,8 @@ func (s *Service) Init(ctx context.Context) error { s.logger.Info(ctx).Int("count", len(s.instances)).Msg("processors initialized") for _, i := range instances { - measure.ProcessorsGauge.WithValues(i.Type).Inc() + i.init(s.logger) + measure.ProcessorsGauge.WithValues(i.Plugin).Inc() } return nil @@ -83,11 +91,29 @@ func (s *Service) Get(_ context.Context, id string) (*Instance, error) { return ins, nil } +func (s *Service) MakeRunnableProcessor(ctx context.Context, i *Instance) (*RunnableProcessor, error) { + if i.running { + return nil, ErrProcessorRunning + } + + p, err := s.registry.NewProcessor(ctx, i.Plugin, i.ID) + if err != nil { + return nil, err + } + cond, err := newProcessorCondition(i.Condition) + if err != nil { + return nil, cerrors.Errorf("invalid condition: %w", err) + } + + i.running = true + return newRunnableProcessor(p, cond, i), nil +} + // Create will create a new processor instance. 
func (s *Service) Create( ctx context.Context, id string, - procType string, + plugin string, parent Parent, cfg Config, pt ProvisionType, @@ -100,14 +126,14 @@ func (s *Service) Create( cfg.Workers = 1 } - builder, err := s.registry.Get(procType) + // check if the processor plugin exists + p, err := s.registry.NewProcessor(ctx, plugin, id) if err != nil { - return nil, err + return nil, cerrors.Errorf("could not get processor: %w", err) } - - p, err := builder(cfg) + err = p.Teardown(ctx) if err != nil { - return nil, cerrors.Errorf("could not build processor: %w", err) + s.logger.Warn(ctx).Err(err).Msg("processor teardown failed") } now := time.Now() @@ -116,12 +142,12 @@ func (s *Service) Create( UpdatedAt: now, CreatedAt: now, ProvisionedBy: pt, - Type: procType, + Plugin: plugin, Parent: parent, Config: cfg, - Processor: p, Condition: cond, } + instance.init(s.logger) // persist instance err = s.store.Set(ctx, instance.ID, instance) @@ -130,7 +156,7 @@ func (s *Service) Create( } s.instances[instance.ID] = instance - measure.ProcessorsGauge.WithValues(procType).Inc() + measure.ProcessorsGauge.WithValues(plugin).Inc() return instance, nil } @@ -142,15 +168,10 @@ func (s *Service) Update(ctx context.Context, id string, cfg Config) (*Instance, return nil, err } - // this can't really fail, this call already passed when creating the instance - builder, _ := s.registry.Get(instance.Type) - - p, err := builder(cfg) - if err != nil { - return nil, cerrors.Errorf("could not build processor: %w", err) + if instance.running { + return nil, cerrors.Errorf("could not update processor instance (ID: %s): %w", id, ErrProcessorRunning) } - instance.Processor = p instance.Config = cfg instance.UpdatedAt = time.Now() @@ -171,13 +192,17 @@ func (s *Service) Delete(ctx context.Context, id string) error { return err } + if instance.running { + return cerrors.Errorf("could not delete processor instance (ID: %s): %w", id, ErrProcessorRunning) + } + err = s.store.Delete(ctx, id) if 
err != nil { return cerrors.Errorf("could not delete processor instance from store: %w", err) } delete(s.instances, id) - instance.Processor.Close() - measure.ProcessorsGauge.WithValues(instance.Type).Dec() + instance.Close() + measure.ProcessorsGauge.WithValues(instance.Plugin).Dec() return nil } diff --git a/pkg/processor/service_test.go b/pkg/processor/service_test.go index 258f8c564..c52586cb7 100644 --- a/pkg/processor/service_test.go +++ b/pkg/processor/service_test.go @@ -12,17 +12,20 @@ // See the License for the specific language governing permissions and // limitations under the License. -package processor_test +package processor import ( "context" "testing" + sdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/database/inmemory" dbmock "github.com/conduitio/conduit/pkg/foundation/database/mock" "github.com/conduitio/conduit/pkg/foundation/log" - "github.com/conduitio/conduit/pkg/processor" + "github.com/conduitio/conduit/pkg/plugin" + proc_plugin "github.com/conduitio/conduit/pkg/plugin/processor" + proc_mock "github.com/conduitio/conduit/pkg/plugin/processor/mock" "github.com/conduitio/conduit/pkg/processor/mock" "github.com/google/uuid" "github.com/matryer/is" @@ -33,22 +36,30 @@ func TestService_Init_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) procType := "processor-type" - p := mock.NewProcessor(ctrl) + p := proc_mock.NewProcessor(gomock.NewController(t)) + p.EXPECT().Teardown(ctx).Return(nil) - registry := newTestBuilderRegistry(is, map[string]processor.Interface{procType: p}) - service := processor.NewService(log.Nop(), db, registry) + registry := newPluginService(t, map[string]sdk.Processor{procType: p}) + service := NewService(log.Nop(), db, registry) // create a processor instance - _, err := service.Create(ctx, uuid.NewString(), procType, processor.Parent{}, 
processor.Config{}, processor.ProvisionTypeAPI, "") + _, err := service.Create( + ctx, + uuid.NewString(), + procType, + Parent{}, + Config{}, + ProvisionTypeAPI, + "", + ) is.NoErr(err) want := service.List(ctx) // create a new processor service and initialize it - service = processor.NewService(log.Nop(), db, registry) + service = NewService(log.Nop(), db, registry) err = service.Init(ctx) is.NoErr(err) @@ -80,7 +91,7 @@ func TestService_Check(t *testing.T) { t.Run(tc.name, func(t *testing.T) { is := is.New(t) db.EXPECT().Ping(gomock.Any()).Return(tc.wantErr) - service := processor.NewService(logger, db, processor.NewBuilderRegistry()) + service := NewService(logger, db, &proc_plugin.PluginService{}) gotErr := service.Check(ctx) is.Equal(tc.wantErr, gotErr) @@ -92,58 +103,72 @@ func TestService_Create_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) + p := proc_mock.NewProcessor(gomock.NewController(t)) + p.EXPECT().Teardown(ctx).Return(nil) - p := mock.NewProcessor(ctrl) - - want := &processor.Instance{ - ID: "uuid will be taken from the result", - Type: "processor-type", - Parent: processor.Parent{ + want := &Instance{ + ID: "uuid will be taken from the result", + Plugin: "processor-type", + Parent: Parent{ ID: uuid.NewString(), - Type: processor.ParentTypeConnector, + Type: ParentTypeConnector, }, - Config: processor.Config{ + Config: Config{ Settings: map[string]string{ "processor-config-field-1": "foo", "processor-config-field-2": "bar", }, Workers: 1, }, - Processor: p, } - registry := newTestBuilderRegistry(is, map[string]processor.Interface{want.Type: p}) - service := processor.NewService(log.Nop(), db, registry) + registry := newPluginService(t, map[string]sdk.Processor{want.Plugin: p}) + service := NewService(log.Nop(), db, registry) - got, err := service.Create(ctx, want.ID, want.Type, want.Parent, want.Config, processor.ProvisionTypeAPI, "") + got, err := service.Create( + ctx, 
+ want.ID, + want.Plugin, + want.Parent, + want.Config, + ProvisionTypeAPI, + "", + ) is.NoErr(err) want.ID = got.ID // uuid is random want.CreatedAt = got.CreatedAt want.UpdatedAt = got.UpdatedAt + + // just ensure inspectors are set + is.True(got.inInsp != nil) + is.True(got.outInsp != nil) + got.inInsp = nil + got.outInsp = nil + is.Equal(want, got) } -func TestService_Create_BuilderNotFound(t *testing.T) { +func TestService_Init_PluginNotFound(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - emptyRegistry := processor.NewBuilderRegistry() - service := processor.NewService(log.Nop(), db, emptyRegistry) + procGetter := mock.NewPluginService(gomock.NewController(t)) + procGetter.EXPECT(). + NewProcessor(gomock.Any(), gomock.Any(), gomock.Any()). + Return(nil, plugin.ErrPluginNotFound) + service := NewService(log.Nop(), db, procGetter) - got, err := service.Create( + _, err := service.Create( ctx, uuid.NewString(), "non-existent processor", - processor.Parent{}, - processor.Config{}, - processor.ProvisionTypeAPI, + Parent{}, + Config{}, + ProvisionTypeAPI, "{{true}}", ) - - is.True(err != nil) - is.Equal(got, nil) + is.True(cerrors.Is(err, plugin.ErrPluginNotFound)) } func TestService_Create_BuilderFail(t *testing.T) { @@ -151,51 +176,45 @@ func TestService_Create_BuilderFail(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - procType := "processor-type" wantErr := cerrors.New("builder failed") - registry := processor.NewBuilderRegistry() - err := registry.Register( - procType, - func(got processor.Config) (processor.Interface, error) { - return nil, wantErr - }, - ) - is.NoErr(err) + procGetter := mock.NewPluginService(gomock.NewController(t)) + procGetter.EXPECT(). + NewProcessor(gomock.Any(), gomock.Any(), gomock.Any()). 
+ Return(nil, wantErr) + service := NewService(log.Nop(), db, procGetter) - service := processor.NewService(log.Nop(), db, registry) - - got, err := service.Create( + i, err := service.Create( ctx, uuid.NewString(), - procType, - processor.Parent{}, - processor.Config{}, - processor.ProvisionTypeAPI, + "processor-type", + Parent{}, + Config{}, + ProvisionTypeAPI, "{{true}}", ) is.True(cerrors.Is(err, wantErr)) // expected builder error - is.Equal(got, nil) + is.Equal(i, nil) } func TestService_Create_WorkersNegative(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - registry := processor.NewBuilderRegistry() - service := processor.NewService(log.Nop(), db, registry) + service := NewService(log.Nop(), db, &proc_plugin.PluginService{}) got, err := service.Create( ctx, uuid.NewString(), "processor-type", - processor.Parent{}, - processor.Config{}, - processor.ProvisionTypeAPI, + Parent{}, + Config{Workers: -1}, + ProvisionTypeAPI, "{{true}}", ) is.True(err != nil) // expected workers error + is.Equal("processor workers can't be negative", err.Error()) is.Equal(got, nil) } @@ -203,24 +222,71 @@ func TestService_Delete_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) procType := "processor-type" - p := mock.NewProcessor(ctrl) - p.EXPECT().Close() + p := proc_mock.NewProcessor(gomock.NewController(t)) + p.EXPECT().Teardown(ctx).Return(nil) + registry := newPluginService(t, map[string]sdk.Processor{procType: p}) + service := NewService(log.Nop(), db, registry) - registry := newTestBuilderRegistry(is, map[string]processor.Interface{procType: p}) - service := processor.NewService(log.Nop(), db, registry) + // create a processor instance + i, err := service.Create( + ctx, + uuid.NewString(), + procType, + Parent{}, + Config{}, + ProvisionTypeAPI, + "cond", + ) + is.NoErr(err) + + err = service.Delete(ctx, i.ID) + is.NoErr(err) + + got, err := service.Get(ctx, i.ID) + 
is.True(cerrors.Is(err, ErrInstanceNotFound)) // expected instance not found error + is.Equal(got, nil) +} + +func TestService_Delete_ProcessorIsRunning(t *testing.T) { + is := is.New(t) + ctx := context.Background() + db := &inmemory.DB{} + + procType := "processor-type" + p := proc_mock.NewProcessor(gomock.NewController(t)) + p.EXPECT().Teardown(ctx).Return(nil).AnyTimes() + registry := newPluginService(t, map[string]sdk.Processor{procType: p}) + service := NewService(log.Nop(), db, registry) // create a processor instance - i, err := service.Create(ctx, uuid.NewString(), procType, processor.Parent{}, processor.Config{}, processor.ProvisionTypeAPI, "cond") + i, err := service.Create( + ctx, + uuid.NewString(), + procType, + Parent{}, + Config{}, + ProvisionTypeAPI, + "cond", + ) + is.NoErr(err) + + // make a runnable processor + proc, err := service.MakeRunnableProcessor(ctx, i) + is.NoErr(err) + + err = service.Delete(ctx, i.ID) + is.True(cerrors.Is(err, ErrProcessorRunning)) + + err = proc.Teardown(ctx) is.NoErr(err) err = service.Delete(ctx, i.ID) is.NoErr(err) got, err := service.Get(ctx, i.ID) - is.True(cerrors.Is(err, processor.ErrInstanceNotFound)) // expected instance not found error + is.True(cerrors.Is(err, ErrInstanceNotFound)) // expected instance not found error is.Equal(got, nil) } @@ -228,26 +294,26 @@ func TestService_Delete_Fail(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - service := processor.NewService(log.Nop(), db, processor.NewBuilderRegistry()) + service := NewService(log.Nop(), db, &proc_plugin.PluginService{}) err := service.Delete(ctx, "non-existent processor") - is.True(cerrors.Is(err, processor.ErrInstanceNotFound)) // expected instance not found error + is.True(cerrors.Is(err, ErrInstanceNotFound)) // expected instance not found error } func TestService_Get_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) procType := 
"processor-type" - p := mock.NewProcessor(ctrl) + p := proc_mock.NewProcessor(gomock.NewController(t)) + p.EXPECT().Teardown(ctx).Return(nil) - registry := newTestBuilderRegistry(is, map[string]processor.Interface{procType: p}) - service := processor.NewService(log.Nop(), db, registry) + registry := newPluginService(t, map[string]sdk.Processor{procType: p}) + service := NewService(log.Nop(), db, registry) // create a processor instance - want, err := service.Create(ctx, uuid.NewString(), procType, processor.Parent{}, processor.Config{}, processor.ProvisionTypeAPI, "cond") + want, err := service.Create(ctx, uuid.NewString(), procType, Parent{}, Config{}, ProvisionTypeAPI, "cond") is.NoErr(err) got, err := service.Get(ctx, want.ID) @@ -259,10 +325,10 @@ func TestService_Get_Fail(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - service := processor.NewService(log.Nop(), db, processor.NewBuilderRegistry()) + service := NewService(log.Nop(), db, &proc_plugin.PluginService{}) got, err := service.Get(ctx, "non-existent processor") - is.True(cerrors.Is(err, processor.ErrInstanceNotFound)) // expected instance not found error + is.True(cerrors.Is(err, ErrInstanceNotFound)) // expected instance not found error is.Equal(got, nil) } @@ -270,7 +336,7 @@ func TestService_List_Empty(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - service := processor.NewService(log.Nop(), db, processor.NewBuilderRegistry()) + service := NewService(log.Nop(), db, &proc_plugin.PluginService{}) instances := service.List(ctx) is.True(instances != nil) @@ -281,43 +347,43 @@ func TestService_List_Some(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) procType := "processor-type" - p := mock.NewProcessor(ctrl) + p := proc_mock.NewProcessor(gomock.NewController(t)) + p.EXPECT().Teardown(ctx).Return(nil).Times(3) - registry := newTestBuilderRegistry(is, 
map[string]processor.Interface{procType: p}) - service := processor.NewService(log.Nop(), db, registry) + registry := newPluginService(t, map[string]sdk.Processor{procType: p}) + service := NewService(log.Nop(), db, registry) // create a couple of processor instances - i1, err := service.Create(ctx, uuid.NewString(), procType, processor.Parent{}, processor.Config{}, processor.ProvisionTypeAPI, "") + i1, err := service.Create(ctx, uuid.NewString(), procType, Parent{}, Config{}, ProvisionTypeAPI, "") is.NoErr(err) - i2, err := service.Create(ctx, uuid.NewString(), procType, processor.Parent{}, processor.Config{}, processor.ProvisionTypeAPI, "") + i2, err := service.Create(ctx, uuid.NewString(), procType, Parent{}, Config{}, ProvisionTypeAPI, "") is.NoErr(err) - i3, err := service.Create(ctx, uuid.NewString(), procType, processor.Parent{}, processor.Config{}, processor.ProvisionTypeAPI, "") + i3, err := service.Create(ctx, uuid.NewString(), procType, Parent{}, Config{}, ProvisionTypeAPI, "") is.NoErr(err) instances := service.List(ctx) - is.Equal(map[string]*processor.Instance{i1.ID: i1, i2.ID: i2, i3.ID: i3}, instances) + is.Equal(map[string]*Instance{i1.ID: i1, i2.ID: i2, i3.ID: i3}, instances) } func TestService_Update_Success(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) procType := "processor-type" - p := mock.NewProcessor(ctrl) + p := proc_mock.NewProcessor(gomock.NewController(t)) + p.EXPECT().Teardown(ctx).Return(nil) - registry := newTestBuilderRegistry(is, map[string]processor.Interface{procType: p}) - service := processor.NewService(log.Nop(), db, registry) + registry := newPluginService(t, map[string]sdk.Processor{procType: p}) + service := NewService(log.Nop(), db, registry) // create a processor instance - want, err := service.Create(ctx, uuid.NewString(), procType, processor.Parent{}, processor.Config{}, processor.ProvisionTypeAPI, "") + want, err := service.Create(ctx, 
uuid.NewString(), procType, Parent{}, Config{}, ProvisionTypeAPI, "") is.NoErr(err) - newConfig := processor.Config{ + newConfig := Config{ Settings: map[string]string{ "processor-config-field-1": "foo", "processor-config-field-2": "bar", @@ -338,29 +404,22 @@ func TestService_Update_Fail(t *testing.T) { is := is.New(t) ctx := context.Background() db := &inmemory.DB{} - service := processor.NewService(log.Nop(), db, processor.NewBuilderRegistry()) + service := NewService(log.Nop(), db, &proc_plugin.PluginService{}) - got, err := service.Update(ctx, "non-existent processor", processor.Config{}) - is.True(cerrors.Is(err, processor.ErrInstanceNotFound)) // expected instance not found error + got, err := service.Update(ctx, "non-existent processor", Config{}) + is.True(cerrors.Is(err, ErrInstanceNotFound)) // expected instance not found error is.Equal(got, nil) } -// newTestBuilderRegistry creates a registry with builders for the supplied +// newPluginService creates a registry with builders for the supplied // processors map keyed by processor type. If a value in the map is nil then a // builder will be registered that returns an error. -func newTestBuilderRegistry(is *is.I, processors map[string]processor.Interface) *processor.BuilderRegistry { - registry := processor.NewBuilderRegistry() - for procType, p := range processors { - err := registry.Register( - procType, - func(got processor.Config) (processor.Interface, error) { - if p != nil { - return p, nil - } - return nil, cerrors.New("builder error") - }, - ) - is.NoErr(err) +func newPluginService(t *testing.T, processors map[string]sdk.Processor) *mock.PluginService { + pg := mock.NewPluginService(gomock.NewController(t)) + for name, proc := range processors { + pg.EXPECT(). + NewProcessor(gomock.Any(), name, gomock.Any()).AnyTimes(). 
+ Return(proc, nil) } - return registry + return pg } diff --git a/pkg/processor/store.go b/pkg/processor/store.go index d62767037..3687e21fe 100644 --- a/pkg/processor/store.go +++ b/pkg/processor/store.go @@ -33,14 +33,12 @@ const ( // Store handles the persistence and fetching of processor instances. type Store struct { - db database.DB - registry *BuilderRegistry + db database.DB } -func NewStore(db database.DB, registry *BuilderRegistry) *Store { +func NewStore(db database.DB) *Store { return &Store{ - db: db, - registry: registry, + db: db, } } @@ -132,13 +130,10 @@ func (*Store) trimKeyPrefix(key string) string { // encode encodes a instance from *Instance to []byte. It uses storeInstance in // the background to encode the instance including the processor type. -func (*Store) encode(instance *Instance) ([]byte, error) { - i := *instance // create copy of instance as to not modify it - i.Processor = nil // do not persist processor - +func (*Store) encode(i *Instance) ([]byte, error) { var b bytes.Buffer enc := json.NewEncoder(&b) - err := enc.Encode(i) + err := enc.Encode(*i) if err != nil { return nil, err } @@ -156,17 +151,5 @@ func (s *Store) decode(raw []byte) (*Instance, error) { if err != nil { return nil, err } - - builder, err := s.registry.Get(i.Type) - if err != nil { - return nil, cerrors.Errorf("could not get processor builder for instance %s: %w", i.ID, err) - } - - proc, err := builder(i.Config) - if err != nil { - return nil, cerrors.Errorf("could not create processor: %w", err) - } - - i.Processor = proc return &i, nil } diff --git a/pkg/processor/store_test.go b/pkg/processor/store_test.go index 61f4cf21d..7cb838733 100644 --- a/pkg/processor/store_test.go +++ b/pkg/processor/store_test.go @@ -22,10 +22,8 @@ import ( "github.com/conduitio/conduit/pkg/foundation/database" "github.com/conduitio/conduit/pkg/foundation/database/inmemory" "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/processor/mock" 
"github.com/google/uuid" "github.com/matryer/is" - "go.uber.org/mock/gomock" ) func TestConfigStore_SetGet(t *testing.T) { @@ -33,18 +31,10 @@ func TestConfigStore_SetGet(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - processorType := "test-processor" - - registry := processor.NewBuilderRegistry() - registry.MustRegister(processorType, func(_ processor.Config) (processor.Interface, error) { - p := mock.NewProcessor(ctrl) - return p, nil - }) want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: uuid.NewString(), Type: processor.ParentTypePipeline, @@ -54,15 +44,10 @@ func TestConfigStore_SetGet(t *testing.T) { }, } - var err error - want.Processor, err = registry.MustGet(processorType)(want.Config) - is.NoErr(err) - - s := processor.NewStore(db, registry) + s := processor.NewStore(db) - err = s.Set(ctx, want.ID, want) + err := s.Set(ctx, want.ID, want) is.NoErr(err) - is.True(want.Processor != nil) // make sure processor is left untouched got, err := s.Get(ctx, want.ID) is.NoErr(err) @@ -71,25 +56,16 @@ func TestConfigStore_SetGet(t *testing.T) { func TestConfigStore_GetAll(t *testing.T) { is := is.New(t) - ctx := context.Background() db := &inmemory.DB{} - ctrl := gomock.NewController(t) - procType := "test-processor" - - registry := processor.NewBuilderRegistry() - registry.MustRegister(procType, func(_ processor.Config) (processor.Interface, error) { - p := mock.NewProcessor(ctrl) - return p, nil - }) - s := processor.NewStore(db, registry) + s := processor.NewStore(db) want := make(map[string]*processor.Instance) for i := 0; i < 10; i++ { instance := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", Parent: processor.Parent{ ID: uuid.NewString(), Type: processor.ParentTypePipeline, @@ -102,11 +78,8 @@ func TestConfigStore_GetAll(t 
*testing.T) { // switch up parent types a bit instance.Parent.Type = processor.ParentTypeConnector } - var err error - instance.Processor, err = registry.MustGet(procType)(instance.Config) - is.NoErr(err) - err = s.Set(ctx, instance.ID, instance) + err := s.Set(ctx, instance.ID, instance) is.NoErr(err) want[instance.ID] = instance } @@ -121,14 +94,13 @@ func TestConfigStore_Delete(t *testing.T) { ctx := context.Background() db := &inmemory.DB{} - registry := processor.NewBuilderRegistry() want := &processor.Instance{ - ID: uuid.NewString(), - Type: "test-processor", + ID: uuid.NewString(), + Plugin: "test-processor", } - s := processor.NewStore(db, registry) + s := processor.NewStore(db) err := s.Set(ctx, want.ID, want) is.NoErr(err) diff --git a/pkg/provisioning/config/enrich_test.go b/pkg/provisioning/config/enrich_test.go index 14c4da1f5..239d0b770 100644 --- a/pkg/provisioning/config/enrich_test.go +++ b/pkg/provisioning/config/enrich_test.go @@ -44,7 +44,7 @@ func TestEnrich_DefaultValues(t *testing.T) { Processors: []Processor{ { ID: "proc2", - Type: "js", + Plugin: "js", Workers: 2, Settings: map[string]string{ "additionalProp1": "string", @@ -55,8 +55,8 @@ func TestEnrich_DefaultValues(t *testing.T) { }, Processors: []Processor{ { - ID: "proc1", - Type: "js", + ID: "proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", }, @@ -86,7 +86,7 @@ func TestEnrich_DefaultValues(t *testing.T) { Processors: []Processor{ { ID: "pipeline1:con1:proc2", - Type: "js", + Plugin: "js", Workers: 2, Settings: map[string]string{ "additionalProp1": "string", @@ -98,7 +98,7 @@ func TestEnrich_DefaultValues(t *testing.T) { Processors: []Processor{ { ID: "pipeline1:proc1", - Type: "js", + Plugin: "js", Workers: 1, Settings: map[string]string{ "additionalProp1": "string", diff --git a/pkg/provisioning/config/parser.go b/pkg/provisioning/config/parser.go index ea2878e00..b6cf10d72 100644 --- a/pkg/provisioning/config/parser.go +++ 
b/pkg/provisioning/config/parser.go @@ -40,7 +40,7 @@ type Connector struct { type Processor struct { ID string - Type string + Plugin string Settings map[string]string Workers int Condition string @@ -64,7 +64,7 @@ var ( ConnectorImmutableFields = []string{"Type", "Plugin"} ConnectorMutableFields = []string{"Name", "Settings", "Processors"} - ProcessorImmutableFields = []string{"Type"} + ProcessorImmutableFields = []string{"Plugin"} ProcessorMutableFields = []string{"Settings", "Workers", "Condition"} ) diff --git a/pkg/provisioning/config/validate.go b/pkg/provisioning/config/validate.go index f01ce77ec..177b39f3a 100644 --- a/pkg/provisioning/config/validate.go +++ b/pkg/provisioning/config/validate.go @@ -98,8 +98,11 @@ func validateProcessors(mp []Processor) error { var err error ids := make(map[string]bool) for _, cfg := range mp { - if cfg.Type == "" { - err = multierror.Append(err, cerrors.Errorf("processor %q: \"type\" is mandatory: %w", cfg.ID, ErrMandatoryField)) + if cfg.Plugin == "" { + err = multierror.Append( + err, + cerrors.Errorf("processor %q: \"plugin\" needs to be provided: %w", cfg.ID, ErrMandatoryField), + ) } if cfg.Workers < 0 { err = multierror.Append(err, cerrors.Errorf("processor %q: \"workers\" can't be negative: %w", cfg.ID, ErrInvalidField)) diff --git a/pkg/provisioning/config/validate_test.go b/pkg/provisioning/config/validate_test.go index 0548b1858..7df1e7b96 100644 --- a/pkg/provisioning/config/validate_test.go +++ b/pkg/provisioning/config/validate_test.go @@ -48,7 +48,7 @@ func TestValidator_MandatoryFields(t *testing.T) { Processors: []Processor{{ ID: "pipeline1proc1", // mandatory field - Type: "", + Plugin: "", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -219,7 +219,7 @@ func TestValidator_InvalidFields(t *testing.T) { Description: "desc1", Processors: []Processor{{ ID: "proc1", - Type: "js", + Plugin: "js", Settings: map[string]string{}, // invalid field Workers: -1, @@ -260,7 
+260,7 @@ func TestValidator_MultiErrors(t *testing.T) { Processors: []Processor{{ ID: "pipeline1proc1", // mandatory field #2 - Type: "", + Plugin: "", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -270,7 +270,7 @@ func TestValidator_MultiErrors(t *testing.T) { Processors: []Processor{{ ID: "pipeline1proc1", // mandatory field #3 - Type: "", + Plugin: "", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -316,8 +316,8 @@ func TestValidator_DuplicateID(t *testing.T) { }, Processors: []Processor{ { - ID: "pipeline1proc1", - Type: "js", + ID: "pipeline1proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -336,8 +336,8 @@ func TestValidator_DuplicateID(t *testing.T) { }, Processors: []Processor{ { - ID: "pipeline1proc1", - Type: "js", + ID: "pipeline1proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -348,8 +348,8 @@ func TestValidator_DuplicateID(t *testing.T) { }, Processors: []Processor{ { - ID: "pipeline1proc1", - Type: "js", + ID: "pipeline1proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", diff --git a/pkg/provisioning/config/yaml/parser_test.go b/pkg/provisioning/config/yaml/parser_test.go index 5669bbce7..9852c3eb5 100644 --- a/pkg/provisioning/config/yaml/parser_test.go +++ b/pkg/provisioning/config/yaml/parser_test.go @@ -26,6 +26,7 @@ import ( v1 "github.com/conduitio/conduit/pkg/provisioning/config/yaml/v1" v2 "github.com/conduitio/conduit/pkg/provisioning/config/yaml/v2" "github.com/conduitio/yaml/v3" + "github.com/google/go-cmp/cmp" "github.com/matryer/is" "github.com/rs/zerolog" ) @@ -276,7 +277,7 @@ func TestParser_V2_Success(t *testing.T) { intPtr := func(i int) *int { return &i } want := Configurations{ v2.Configuration{ - Version: "2.0", + Version: "2.2", Pipelines: []v2.Pipeline{ { ID: 
"pipeline1", @@ -285,8 +286,8 @@ func TestParser_V2_Success(t *testing.T) { Description: "desc1", Processors: []v2.Processor{ { - ID: "pipeline1proc1", - Type: "js", + ID: "pipeline1proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -305,8 +306,8 @@ func TestParser_V2_Success(t *testing.T) { }, Processors: []v2.Processor{ { - ID: "proc1", - Type: "js", + ID: "proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -345,8 +346,8 @@ func TestParser_V2_Success(t *testing.T) { }, Processors: []v2.Processor{ { - ID: "con2proc1", - Type: "hoistfield", + ID: "con2proc1", + Plugin: "hoistfield", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -372,7 +373,72 @@ func TestParser_V2_Success(t *testing.T) { got, err := parser.ParseConfigurations(context.Background(), file) is.NoErr(err) - is.Equal(got, want) + diff := cmp.Diff(want, got) + if diff != "" { + t.Errorf("%v", diff) + } +} + +func TestParser_V2_BackwardsCompatibility(t *testing.T) { + is := is.New(t) + parser := NewParser(log.Nop()) + filepath := "./v2/testdata/pipelines6-bwc.yml" + want := Configurations{ + v2.Configuration{ + Version: "2.2", + Pipelines: []v2.Pipeline{ + { + ID: "pipeline6", + Status: "running", + Name: "pipeline6", + Description: "desc1", + Processors: []v2.Processor{ + { + ID: "pipeline1proc1", + Type: "js", + Settings: map[string]string{ + "additionalProp1": "string", + "additionalProp2": "string", + }, + }, + }, + Connectors: []v2.Connector{ + { + ID: "con1", + Type: "source", + Plugin: "builtin:s3", + Name: "s3-source", + Settings: map[string]string{ + "aws.region": "us-east-1", + "aws.bucket": "my-bucket", + }, + Processors: []v2.Processor{ + { + ID: "proc1", + Plugin: "js", + Settings: map[string]string{ + "additionalProp1": "string", + "additionalProp2": "string", + }, + }, + }, + }, + }, + }, + }, + }, + } + + file, err := 
os.Open(filepath) + is.NoErr(err) + defer file.Close() + + got, err := parser.ParseConfigurations(context.Background(), file) + is.NoErr(err) + diff := cmp.Diff(want, got) + if diff != "" { + t.Errorf("%v", diff) + } } func TestParser_V2_Warnings(t *testing.T) { @@ -391,7 +457,7 @@ func TestParser_V2_Warnings(t *testing.T) { // check warnings want := `{"level":"warn","component":"yaml.Parser","line":6,"column":5,"field":"unknownField","message":"field unknownField not found in type v2.Pipeline"} -{"level":"warn","component":"yaml.Parser","line":38,"column":1,"field":"version","value":"2.12","message":"unrecognized version 2.12, falling back to parser version 2.0"} +{"level":"warn","component":"yaml.Parser","line":38,"column":1,"field":"version","value":"2.12","message":"unrecognized version 2.12, falling back to parser version 2.2"} ` is.Equal(out.String(), want) } diff --git a/pkg/provisioning/config/yaml/v1/model.go b/pkg/provisioning/config/yaml/v1/model.go index 0a3dca891..fc0be2a5e 100644 --- a/pkg/provisioning/config/yaml/v1/model.go +++ b/pkg/provisioning/config/yaml/v1/model.go @@ -156,7 +156,8 @@ func (c Connector) processorsToConfig() []config.Processor { func (p Processor) ToConfig() config.Processor { return config.Processor{ - Type: p.Type, + // Type was removed in favor of Plugin + Plugin: p.Type, Settings: p.Settings, Workers: p.Workers, } diff --git a/pkg/provisioning/config/yaml/v2/config.go b/pkg/provisioning/config/yaml/v2/config.go index 71a026bba..d6efb6172 100644 --- a/pkg/provisioning/config/yaml/v2/config.go +++ b/pkg/provisioning/config/yaml/v2/config.go @@ -74,7 +74,7 @@ func fromProcessorsConfig(procs []config.Processor) []Processor { for i, proc := range procs { processors[i] = Processor{ ID: proc.ID, - Type: proc.Type, + Plugin: proc.Plugin, Settings: proc.Settings, Workers: proc.Workers, } diff --git a/pkg/provisioning/config/yaml/v2/config_test.go b/pkg/provisioning/config/yaml/v2/config_test.go index cf3fdaeb2..b0b90aa38 100644 --- 
a/pkg/provisioning/config/yaml/v2/config_test.go +++ b/pkg/provisioning/config/yaml/v2/config_test.go @@ -54,8 +54,8 @@ func testPipelineConfigs() []config.Pipeline { }, Processors: []config.Processor{ { - ID: "proc1", - Type: "js", + ID: "proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -73,8 +73,8 @@ func testPipelineConfigs() []config.Pipeline { }, Processors: []config.Processor{ { - ID: "pipeline1proc1", - Type: "js", + ID: "pipeline1proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -108,8 +108,8 @@ func testPipelineConfigs() []config.Pipeline { }, Processors: []config.Processor{ { - ID: "con2proc1", - Type: "hoistfield", + ID: "con2proc1", + Plugin: "hoistfield", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -127,7 +127,7 @@ func expectedModelConfiguration() Configuration { intPtr := func(i int) *int { return &i } return Configuration{ - Version: "2.0", + Version: "2.2", Pipelines: []Pipeline{ { ID: "pipeline1", @@ -146,8 +146,8 @@ func expectedModelConfiguration() Configuration { }, Processors: []Processor{ { - ID: "proc1", - Type: "js", + ID: "proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -166,8 +166,8 @@ func expectedModelConfiguration() Configuration { }, Processors: []Processor{ { - ID: "pipeline1proc1", - Type: "js", + ID: "pipeline1proc1", + Plugin: "js", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", @@ -201,8 +201,8 @@ func expectedModelConfiguration() Configuration { }, Processors: []Processor{ { - ID: "con2proc1", - Type: "hoistfield", + ID: "con2proc1", + Plugin: "hoistfield", Settings: map[string]string{ "additionalProp1": "string", "additionalProp2": "string", diff --git a/pkg/provisioning/config/yaml/v2/model.go b/pkg/provisioning/config/yaml/v2/model.go index 
2d8927d52..0faedc044 100644 --- a/pkg/provisioning/config/yaml/v2/model.go +++ b/pkg/provisioning/config/yaml/v2/model.go @@ -19,22 +19,47 @@ import ( "github.com/conduitio/conduit/pkg/provisioning/config/yaml/internal" ) -const LatestVersion = "2.0" +const LatestVersion = "2.2" const MajorVersion = "2" // Changelog should be adjusted every time we change the pipeline config and add // a new config version. Based on the changelog the parser will output warnings. var Changelog = internal.Changelog{ "2.0": {}, // initial version - "2.1": {{ - Field: "pipelines.*.processors.*.condition", - ChangeType: internal.FieldIntroduced, - Message: "field condition was introduced in version 2.1, please update the pipeline config version", - }, { - Field: "pipelines.*.connectors.*.processors.*.condition", - ChangeType: internal.FieldIntroduced, - Message: "field condition was introduced in version 2.1, please update the pipeline config version", - }}, + "2.1": { + { + Field: "pipelines.*.processors.*.condition", + ChangeType: internal.FieldIntroduced, + Message: "field condition was introduced in version 2.1, please update the pipeline config version", + }, + { + Field: "pipelines.*.connectors.*.processors.*.condition", + ChangeType: internal.FieldIntroduced, + Message: "field condition was introduced in version 2.1, please update the pipeline config version", + }, + }, + "2.2": { + { + Field: "pipelines.*.processors.*.plugin", + ChangeType: internal.FieldIntroduced, + Message: "field plugin was introduced in version 2.2, please update the pipeline config version", + }, + { + Field: "pipelines.*.connectors.*.processors.*.plugin", + ChangeType: internal.FieldIntroduced, + Message: "field plugin was introduced in version 2.2, please update the pipeline config version", + }, + { + Field: "pipelines.*.processors.*.type", + ChangeType: internal.FieldDeprecated, + Message: "please use field 'plugin' (introduced in version 2.2)", + }, + { + Field: 
"pipelines.*.connectors.*.processors.*.type", + ChangeType: internal.FieldDeprecated, + Message: "please use field 'plugin' (introduced in version 2.2)", + }, + }, } type Configuration struct { @@ -64,6 +89,7 @@ type Connector struct { type Processor struct { ID string `yaml:"id" json:"id"` Type string `yaml:"type" json:"type"` + Plugin string `yaml:"plugin" json:"plugin"` Condition string `yaml:"condition" json:"condition"` Settings map[string]string `yaml:"settings" json:"settings"` Workers int `yaml:"workers" json:"workers"` @@ -145,9 +171,14 @@ func (c Connector) processorsToConfig() []config.Processor { } func (p Processor) ToConfig() config.Processor { + plugin := p.Plugin + if plugin == "" { + plugin = p.Type + } + return config.Processor{ ID: p.ID, - Type: p.Type, + Plugin: plugin, Settings: p.Settings, Workers: p.Workers, Condition: p.Condition, diff --git a/pkg/provisioning/config/yaml/v2/model_test.go b/pkg/provisioning/config/yaml/v2/model_test.go index f20f5f0eb..f9406b8ff 100644 --- a/pkg/provisioning/config/yaml/v2/model_test.go +++ b/pkg/provisioning/config/yaml/v2/model_test.go @@ -32,7 +32,7 @@ func TestConfiguration_JSON(t *testing.T) { Processors: []Processor{ { ID: "pipeline1proc1", - Type: "js", + Plugin: "alpha", Condition: "{{ true }}", Settings: map[string]string{ "additionalProp1": "string", @@ -91,6 +91,7 @@ func TestConfiguration_JSON(t *testing.T) { { "id": "proc1", "type": "js", + "plugin": "", "condition": "", "settings": { "additionalProp1": "string", @@ -104,7 +105,8 @@ func TestConfiguration_JSON(t *testing.T) { "processors": [ { "id": "pipeline1proc1", - "type": "js", + "type": "", + "plugin": "alpha", "condition": "{{ true }}", "settings": { "additionalProp1": "string", diff --git a/pkg/provisioning/config/yaml/v2/testdata/pipelines1-success.yml b/pkg/provisioning/config/yaml/v2/testdata/pipelines1-success.yml index b3b5d3c61..7d65d63bc 100644 --- a/pkg/provisioning/config/yaml/v2/testdata/pipelines1-success.yml +++ 
b/pkg/provisioning/config/yaml/v2/testdata/pipelines1-success.yml @@ -1,5 +1,5 @@ --- -version: 2.0 +version: 2.2 pipelines: - id: pipeline1 @@ -17,13 +17,13 @@ pipelines: aws.bucket: my-bucket processors: - id: proc1 - type: js + plugin: js settings: additionalProp1: string additionalProp2: string processors: - id: pipeline1proc1 - type: js + plugin: js settings: additionalProp1: string additionalProp2: string @@ -50,7 +50,7 @@ pipelines: path: my/path processors: - id: con2proc1 - type: hoistfield + plugin: hoistfield settings: additionalProp1: string additionalProp2: string diff --git a/pkg/provisioning/config/yaml/v2/testdata/pipelines6-bwc.yml b/pkg/provisioning/config/yaml/v2/testdata/pipelines6-bwc.yml new file mode 100644 index 000000000..969a75b2a --- /dev/null +++ b/pkg/provisioning/config/yaml/v2/testdata/pipelines6-bwc.yml @@ -0,0 +1,28 @@ +--- +version: 2.2 + +pipelines: + - id: pipeline6 + status: running + name: pipeline6 + description: desc1 + connectors: + - id: con1 + type: source + plugin: builtin:s3 + name: s3-source + settings: + aws.region: us-east-1 + aws.bucket: my-bucket + processors: + - id: proc1 + plugin: js + settings: + additionalProp1: string + additionalProp2: string + processors: + - id: pipeline1proc1 + type: js + settings: + additionalProp1: string + additionalProp2: string diff --git a/pkg/provisioning/export.go b/pkg/provisioning/export.go index b6ad7db12..a2b2f0e4a 100644 --- a/pkg/provisioning/export.go +++ b/pkg/provisioning/export.go @@ -124,7 +124,7 @@ func (*Service) connectorToConfig(c *connector.Instance) config.Connector { func (*Service) processorToConfig(p *processor.Instance) config.Processor { return config.Processor{ ID: p.ID, - Type: p.Type, + Plugin: p.Plugin, Settings: p.Config.Settings, Workers: p.Config.Workers, } diff --git a/pkg/provisioning/import.go b/pkg/provisioning/import.go index d500c99ee..80bc3cc2b 100644 --- a/pkg/provisioning/import.go +++ b/pkg/provisioning/import.go @@ -80,18 +80,18 @@ func (s 
*Service) rollbackActions(ctx context.Context, actions []action) bool { func (s *Service) newActionsBuilder() actionsBuilder { return actionsBuilder{ - pipelineService: s.pipelineService, - connectorService: s.connectorService, - processorService: s.processorService, - pluginService: s.pluginService, + pipelineService: s.pipelineService, + connectorService: s.connectorService, + processorService: s.processorService, + connectorPluginService: s.connectorPluginService, } } type actionsBuilder struct { - pipelineService PipelineService - connectorService ConnectorService - processorService ProcessorService - pluginService PluginService + pipelineService PipelineService + connectorService ConnectorService + processorService ProcessorService + connectorPluginService ConnectorPluginService } func (ab actionsBuilder) Build(oldConfig, newConfig config.Pipeline) []action { @@ -232,18 +232,18 @@ func (ab actionsBuilder) prepareConnectorActions(oldConfig, newConfig config.Con if oldConfig.ID == "" { // no old config, it's a brand new connector return []action{createConnectorAction{ - cfg: newConfig, - pipelineID: pipelineID, - connectorService: ab.connectorService, - pluginService: ab.pluginService, + cfg: newConfig, + pipelineID: pipelineID, + connectorService: ab.connectorService, + connectorPluginService: ab.connectorPluginService, }} } else if newConfig.ID == "" { // no new config, it's an old connector that needs to be deleted return []action{deleteConnectorAction{ - cfg: oldConfig, - pipelineID: pipelineID, - connectorService: ab.connectorService, - pluginService: ab.pluginService, + cfg: oldConfig, + pipelineID: pipelineID, + connectorService: ab.connectorService, + connectorPluginService: ab.connectorPluginService, }} } @@ -274,16 +274,16 @@ func (ab actionsBuilder) prepareConnectorActions(oldConfig, newConfig config.Con // we have to delete the old connector and create a new one return []action{ deleteConnectorAction{ - cfg: oldConfig, - pipelineID: pipelineID, - 
connectorService: ab.connectorService, - pluginService: ab.pluginService, + cfg: oldConfig, + pipelineID: pipelineID, + connectorService: ab.connectorService, + connectorPluginService: ab.connectorPluginService, }, createConnectorAction{ - cfg: newConfig, - pipelineID: pipelineID, - connectorService: ab.connectorService, - pluginService: ab.pluginService, + cfg: newConfig, + pipelineID: pipelineID, + connectorService: ab.connectorService, + connectorPluginService: ab.connectorPluginService, }, } } diff --git a/pkg/provisioning/import_actions.go b/pkg/provisioning/import_actions.go index 0a14e83bb..fd71d16fa 100644 --- a/pkg/provisioning/import_actions.go +++ b/pkg/provisioning/import_actions.go @@ -96,8 +96,8 @@ type createConnectorAction struct { cfg config.Connector pipelineID string - connectorService ConnectorService - pluginService PluginService + connectorService ConnectorService + connectorPluginService ConnectorPluginService } func (a createConnectorAction) String() string { @@ -131,7 +131,7 @@ func (a createConnectorAction) Do(ctx context.Context) error { return nil } func (a createConnectorAction) Rollback(ctx context.Context) error { - err := a.connectorService.Delete(ctx, a.cfg.ID, a.pluginService) + err := a.connectorService.Delete(ctx, a.cfg.ID, a.connectorPluginService) // ignore instance not found errors, this means the action failed to // create the connector in the first place if cerrors.Is(err, connector.ErrInstanceNotFound) { @@ -163,7 +163,7 @@ func (a createProcessorAction) Do(ctx context.Context) error { _, err := a.processorService.Create( ctx, a.cfg.ID, - a.cfg.Type, + a.cfg.Plugin, a.parent, processor.Config{ Settings: a.cfg.Settings, diff --git a/pkg/provisioning/import_actions_test.go b/pkg/provisioning/import_actions_test.go index 7dbb18665..7cd60597b 100644 --- a/pkg/provisioning/import_actions_test.go +++ b/pkg/provisioning/import_actions_test.go @@ -294,10 +294,10 @@ func TestCreateConnectorAction_Do(t *testing.T) { 
connSrv.EXPECT().AddProcessor(ctx, haveCfg.ID, haveCfg.Processors[1].ID) a := createConnectorAction{ - cfg: haveCfg, - pipelineID: pipelineID, - connectorService: connSrv, - pluginService: nil, // only needed for Rollback + cfg: haveCfg, + pipelineID: pipelineID, + connectorService: connSrv, + connectorPluginService: nil, // only needed for Rollback } err := a.Do(ctx) is.NoErr(err) @@ -320,15 +320,15 @@ func TestCreateConnectorAction_Rollback(t *testing.T) { Processors: []config.Processor{{ID: "proc1"}, {ID: "proc2"}}, } - plugSrv := mock.NewPluginService(ctrl) + connPlugSrv := mock.NewConnectorPluginService(ctrl) connSrv := mock.NewConnectorService(ctrl) - connSrv.EXPECT().Delete(ctx, haveCfg.ID, plugSrv) + connSrv.EXPECT().Delete(ctx, haveCfg.ID, connPlugSrv) a := createConnectorAction{ - cfg: haveCfg, - pipelineID: pipelineID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: haveCfg, + pipelineID: pipelineID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, } err := a.Rollback(ctx) is.NoErr(err) @@ -413,15 +413,15 @@ func TestDeleteConnectorAction_Do(t *testing.T) { Processors: []config.Processor{{ID: "proc1"}, {ID: "proc2"}}, } - plugSrv := mock.NewPluginService(ctrl) + connPlugSrv := mock.NewConnectorPluginService(ctrl) connSrv := mock.NewConnectorService(ctrl) - connSrv.EXPECT().Delete(ctx, haveCfg.ID, plugSrv) + connSrv.EXPECT().Delete(ctx, haveCfg.ID, connPlugSrv) a := deleteConnectorAction{ - cfg: haveCfg, - pipelineID: pipelineID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: haveCfg, + pipelineID: pipelineID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, } err := a.Do(ctx) is.NoErr(err) @@ -462,10 +462,10 @@ func TestDeleteConnectorAction_Rollback(t *testing.T) { connSrv.EXPECT().AddProcessor(ctx, haveCfg.ID, haveCfg.Processors[1].ID) a := deleteConnectorAction{ - cfg: haveCfg, - pipelineID: pipelineID, - connectorService: connSrv, - pluginService: nil, // only needed for Do + cfg: 
haveCfg, + pipelineID: pipelineID, + connectorService: connSrv, + connectorPluginService: nil, // only needed for Do } err := a.Rollback(ctx) is.NoErr(err) @@ -480,7 +480,7 @@ func TestCreateProcessorAction_Do(t *testing.T) { haveCfg := config.Processor{ ID: uuid.NewString(), - Type: "processor-type", + Plugin: "processor-type", Settings: map[string]string{"foo": "bar"}, Workers: 2, Condition: "{{ eq .Metadata.opencdc.version \"v1\" }}", @@ -495,7 +495,7 @@ func TestCreateProcessorAction_Do(t *testing.T) { } procSrv := mock.NewProcessorService(ctrl) - procSrv.EXPECT().Create(ctx, haveCfg.ID, haveCfg.Type, parent, wantCfg, processor.ProvisionTypeConfig, haveCfg.Condition) + procSrv.EXPECT().Create(ctx, haveCfg.ID, haveCfg.Plugin, parent, wantCfg, processor.ProvisionTypeConfig, haveCfg.Condition) a := createProcessorAction{ cfg: haveCfg, @@ -513,7 +513,7 @@ func TestCreateProcessorAction_Rollback(t *testing.T) { haveCfg := config.Processor{ ID: uuid.NewString(), - Type: "processor-type", + Plugin: "processor-type", Settings: map[string]string{"foo": "bar"}, Workers: 2, } @@ -533,7 +533,7 @@ func TestCreateProcessorAction_Rollback(t *testing.T) { func TestUpdateProcessorAction(t *testing.T) { haveCfg := config.Processor{ ID: uuid.NewString(), - Type: "processor-type", + Plugin: "processor-type", Settings: map[string]string{"foo": "bar"}, Workers: 2, } @@ -594,7 +594,7 @@ func TestDeleteProcessorAction_Do(t *testing.T) { haveCfg := config.Processor{ ID: uuid.NewString(), - Type: "processor-type", + Plugin: "processor-type", Settings: map[string]string{"foo": "bar"}, Workers: 2, } @@ -618,7 +618,7 @@ func TestDeleteProcessorAction_Rollback(t *testing.T) { haveCfg := config.Processor{ ID: uuid.NewString(), - Type: "processor-type", + Plugin: "processor-type", Settings: map[string]string{"foo": "bar"}, Workers: 2, Condition: "{{ eq .Metadata.opencdc.version \"v1\" }}", @@ -633,7 +633,7 @@ func TestDeleteProcessorAction_Rollback(t *testing.T) { } procSrv := 
mock.NewProcessorService(ctrl) - procSrv.EXPECT().Create(ctx, haveCfg.ID, haveCfg.Type, parent, wantCfg, processor.ProvisionTypeConfig, haveCfg.Condition) + procSrv.EXPECT().Create(ctx, haveCfg.ID, haveCfg.Plugin, parent, wantCfg, processor.ProvisionTypeConfig, haveCfg.Condition) a := deleteProcessorAction{ cfg: haveCfg, diff --git a/pkg/provisioning/import_test.go b/pkg/provisioning/import_test.go index 622217668..a8cc43951 100644 --- a/pkg/provisioning/import_test.go +++ b/pkg/provisioning/import_test.go @@ -131,7 +131,7 @@ func TestActionBuilder_Build(t *testing.T) { logger := log.Nop() ctrl := gomock.NewController(t) - srv, pipSrv, connSrv, procSrv, plugSrv := newTestService(ctrl, logger) + srv, pipSrv, connSrv, procSrv, connPlugSrv := newTestService(ctrl, logger) oldConfig := config.Pipeline{ ID: "config-id", @@ -164,21 +164,21 @@ func TestActionBuilder_Build(t *testing.T) { }}, Processors: []config.Processor{{ // this processor does not change, it should be ignored - ID: "proc-1", - Type: "proc-type", + ID: "proc-1", + Plugin: "proc-type", }, { // this processor contains an invalid change, it should be recreated - ID: "proc-2", - Type: "old-proc-type", // type was updated + ID: "proc-2", + Plugin: "old-proc-type", // type was updated }, { // this processor gets new settings, it should be updated ID: "proc-3", - Type: "proc-type", + Plugin: "proc-type", Settings: map[string]string{"foo": "bar"}, }, { // this processor is deleted in the new config, it should be deleted - ID: "proc-deleted", - Type: "proc-type", + ID: "proc-deleted", + Plugin: "proc-type", }}, } newConfig := config.Pipeline{ @@ -212,30 +212,30 @@ func TestActionBuilder_Build(t *testing.T) { }}, Processors: []config.Processor{{ // this processor does not change, it should be ignored - ID: "proc-1", - Type: "proc-type", + ID: "proc-1", + Plugin: "proc-type", }, { // this processor contains an invalid change, it should be recreated - ID: "proc-2", - Type: "new-proc-type", // type was updated + ID: 
"proc-2", + Plugin: "new-proc-type", // type was updated }, { // this processor gets new settings, it should be updated ID: "proc-3", - Type: "proc-type", + Plugin: "proc-type", Settings: map[string]string{"foo": "baz"}, }, { // this processor is new, it should be created - ID: "proc-new", - Type: "proc-type", + ID: "proc-new", + Plugin: "proc-type", }}, } wantOldActions := []action{ deleteConnectorAction{ - cfg: oldConfig.Connectors[3], - pipelineID: oldConfig.ID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: oldConfig.Connectors[3], + pipelineID: oldConfig.ID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, }, deleteProcessorAction{ cfg: oldConfig.Connectors[3].Processors[0], @@ -261,16 +261,16 @@ func TestActionBuilder_Build(t *testing.T) { pipelineService: pipSrv, }, deleteConnectorAction{ - cfg: oldConfig.Connectors[1], - pipelineID: oldConfig.ID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: oldConfig.Connectors[1], + pipelineID: oldConfig.ID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, }, createConnectorAction{ - cfg: newConfig.Connectors[1], - pipelineID: newConfig.ID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: newConfig.Connectors[1], + pipelineID: newConfig.ID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, }, updateConnectorAction{ oldConfig: oldConfig.Connectors[2], @@ -278,10 +278,10 @@ func TestActionBuilder_Build(t *testing.T) { connectorService: connSrv, }, createConnectorAction{ - cfg: newConfig.Connectors[3], - pipelineID: newConfig.ID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: newConfig.Connectors[3], + pipelineID: newConfig.ID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, }, createProcessorAction{ cfg: newConfig.Connectors[3].Processors[0], @@ -426,13 +426,13 @@ func TestActionsBuilder_PreparePipelineActions_NoAction(t *testing.T) { name: "different Processors (same ID)", oldConfig: 
config.Pipeline{ID: "config-id", Processors: []config.Processor{{ ID: "proc-id", // only ID has to match - Type: "old-type", + Plugin: "old-type", Settings: map[string]string{"foo": "bar"}, Workers: 1, }}}, newConfig: config.Pipeline{ID: "config-id", Processors: []config.Processor{{ ID: "proc-id", // only ID has to match - Type: "new-type", + Plugin: "new-type", Settings: map[string]string{"foo": "baz"}, Workers: 2, }}}, @@ -503,17 +503,17 @@ func TestActionsBuilder_PrepareConnectorActions_Create(t *testing.T) { logger := log.Nop() ctrl := gomock.NewController(t) - srv, _, connSrv, _, plugSrv := newTestService(ctrl, logger) + srv, _, connSrv, _, connPlugSrv := newTestService(ctrl, logger) oldConfig := config.Connector{} newConfig := config.Connector{ID: "config-id"} pipelineID := uuid.NewString() want := []action{createConnectorAction{ - cfg: newConfig, - pipelineID: pipelineID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: newConfig, + pipelineID: pipelineID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, }} got := srv.newActionsBuilder().prepareConnectorActions(oldConfig, newConfig, pipelineID) @@ -525,17 +525,17 @@ func TestActionsBuilder_PrepareConnectorActions_Delete(t *testing.T) { logger := log.Nop() ctrl := gomock.NewController(t) - srv, _, connSrv, _, plugSrv := newTestService(ctrl, logger) + srv, _, connSrv, _, connPlugSrv := newTestService(ctrl, logger) oldConfig := config.Connector{ID: "config-id"} newConfig := config.Connector{} pipelineID := uuid.NewString() want := []action{deleteConnectorAction{ - cfg: oldConfig, - pipelineID: pipelineID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: oldConfig, + pipelineID: pipelineID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, }} got := srv.newActionsBuilder().prepareConnectorActions(oldConfig, newConfig, pipelineID) @@ -560,13 +560,13 @@ func TestActionsBuilder_PrepareConnectorActions_NoAction(t *testing.T) { name: "different 
Processors", oldConfig: config.Connector{ID: "config-id", Processors: []config.Processor{{ ID: "proc-id", // only ID has to match - Type: "old-type", + Plugin: "old-type", Settings: map[string]string{"foo": "bar"}, Workers: 1, }}}, newConfig: config.Connector{ID: "config-id", Processors: []config.Processor{{ ID: "proc-id", // only ID has to match - Type: "new-type", + Plugin: "new-type", Settings: map[string]string{"foo": "baz"}, Workers: 2, }}}, @@ -623,7 +623,7 @@ func TestActionsBuilder_PrepareConnectorActions_Recreate(t *testing.T) { logger := log.Nop() ctrl := gomock.NewController(t) - srv, _, connSrv, _, plugSrv := newTestService(ctrl, logger) + srv, _, connSrv, _, connPlugSrv := newTestService(ctrl, logger) pipelineID := uuid.NewString() testCases := []struct { @@ -644,15 +644,15 @@ func TestActionsBuilder_PrepareConnectorActions_Recreate(t *testing.T) { t.Run(tc.name, func(t *testing.T) { is := is.New(t) want := []action{deleteConnectorAction{ - cfg: tc.oldConfig, - pipelineID: pipelineID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: tc.oldConfig, + pipelineID: pipelineID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, }, createConnectorAction{ - cfg: tc.newConfig, - pipelineID: pipelineID, - connectorService: connSrv, - pluginService: plugSrv, + cfg: tc.newConfig, + pipelineID: pipelineID, + connectorService: connSrv, + connectorPluginService: connPlugSrv, }} got := srv.newActionsBuilder().prepareConnectorActions(tc.oldConfig, tc.newConfig, pipelineID) is.Equal(got, want) @@ -780,8 +780,8 @@ func TestActionsBuilder_PrepareProcessorActions_Recreate(t *testing.T) { newConfig config.Processor }{{ name: "different Type", - oldConfig: config.Processor{ID: "config-id", Type: "old-type"}, - newConfig: config.Processor{ID: "config-id", Type: "new-type"}, + oldConfig: config.Processor{ID: "config-id", Plugin: "old-type"}, + newConfig: config.Processor{ID: "config-id", Plugin: "new-type"}, }} for _, tc := range testCases { @@ 
-808,16 +808,16 @@ func TestActionsBuilder_PrepareProcessorActions_Recreate(t *testing.T) { func intPtr(i int) *int { return &i } -func newTestService(ctrl *gomock.Controller, logger log.CtxLogger) (*Service, *mock.PipelineService, *mock.ConnectorService, *mock.ProcessorService, *mock.PluginService) { +func newTestService(ctrl *gomock.Controller, logger log.CtxLogger) (*Service, *mock.PipelineService, *mock.ConnectorService, *mock.ProcessorService, *mock.ConnectorPluginService) { db := &inmemory.DB{} pipSrv := mock.NewPipelineService(ctrl) connSrv := mock.NewConnectorService(ctrl) procSrv := mock.NewProcessorService(ctrl) - plugSrv := mock.NewPluginService(ctrl) + connPlugSrv := mock.NewConnectorPluginService(ctrl) - srv := NewService(db, logger, pipSrv, connSrv, procSrv, plugSrv, "") + srv := NewService(db, logger, pipSrv, connSrv, procSrv, connPlugSrv, "") - return srv, pipSrv, connSrv, procSrv, plugSrv + return srv, pipSrv, connSrv, procSrv, connPlugSrv } type fakeAction struct { diff --git a/pkg/provisioning/interfaces.go b/pkg/provisioning/interfaces.go index 649761018..f2ac63594 100644 --- a/pkg/provisioning/interfaces.go +++ b/pkg/provisioning/interfaces.go @@ -20,11 +20,11 @@ import ( "github.com/conduitio/conduit/pkg/connector" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/pipeline" - "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/processor" ) -//go:generate mockgen -destination=mock/provisioning.go -package=mock -mock_names=PipelineService=PipelineService,ConnectorService=ConnectorService,ProcessorService=ProcessorService,PluginService=PluginService . 
PipelineService,ConnectorService,ProcessorService,PluginService +//go:generate mockgen -destination=mock/provisioning.go -package=mock -mock_names=PipelineService=PipelineService,ConnectorService=ConnectorService,ProcessorService=ProcessorService,ConnectorPluginService=ConnectorPluginService . PipelineService,ConnectorService,ProcessorService,ConnectorPluginService type PipelineService interface { Get(ctx context.Context, id string) (*pipeline.Instance, error) @@ -34,7 +34,7 @@ type PipelineService interface { Delete(ctx context.Context, pipelineID string) error UpdateDLQ(ctx context.Context, pipelineID string, cfg pipeline.DLQ) (*pipeline.Instance, error) - Start(ctx context.Context, connFetcher pipeline.ConnectorFetcher, procFetcher pipeline.ProcessorFetcher, pluginFetcher pipeline.PluginDispenserFetcher, pipelineID string) error + Start(ctx context.Context, connFetcher pipeline.ConnectorFetcher, procService pipeline.ProcessorService, pluginFetcher pipeline.PluginDispenserFetcher, pipelineID string) error Stop(ctx context.Context, pipelineID string, force bool) error AddConnector(ctx context.Context, pipelineID string, connectorID string) (*pipeline.Instance, error) @@ -55,11 +55,20 @@ type ConnectorService interface { type ProcessorService interface { Get(ctx context.Context, id string) (*processor.Instance, error) - Create(ctx context.Context, id string, procType string, parent processor.Parent, cfg processor.Config, p processor.ProvisionType, condition string) (*processor.Instance, error) + Create( + ctx context.Context, + id string, + plugin string, + parent processor.Parent, + cfg processor.Config, + p processor.ProvisionType, + condition string, + ) (*processor.Instance, error) + MakeRunnableProcessor(ctx context.Context, i *processor.Instance) (*processor.RunnableProcessor, error) Update(ctx context.Context, id string, cfg processor.Config) (*processor.Instance, error) Delete(ctx context.Context, id string) error } -type PluginService interface { - 
NewDispenser(ctx log.CtxLogger, name string) (plugin.Dispenser, error) +type ConnectorPluginService interface { + NewDispenser(ctx log.CtxLogger, name string) (connectorPlugin.Dispenser, error) } diff --git a/pkg/provisioning/mock/provisioning.go b/pkg/provisioning/mock/provisioning.go index bf5cb564b..57c629965 100644 --- a/pkg/provisioning/mock/provisioning.go +++ b/pkg/provisioning/mock/provisioning.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/conduitio/conduit/pkg/provisioning (interfaces: PipelineService,ConnectorService,ProcessorService,PluginService) +// Source: github.com/conduitio/conduit/pkg/provisioning (interfaces: PipelineService,ConnectorService,ProcessorService,ConnectorPluginService) // // Generated by this command: // -// mockgen -destination=mock/provisioning.go -package=mock -mock_names=PipelineService=PipelineService,ConnectorService=ConnectorService,ProcessorService=ProcessorService,PluginService=PluginService . PipelineService,ConnectorService,ProcessorService,PluginService +// mockgen -destination=mock/provisioning.go -package=mock -mock_names=PipelineService=PipelineService,ConnectorService=ConnectorService,ProcessorService=ProcessorService,ConnectorPluginService=ConnectorPluginService . PipelineService,ConnectorService,ProcessorService,ConnectorPluginService // // Package mock is a generated GoMock package. @@ -16,7 +16,7 @@ import ( connector "github.com/conduitio/conduit/pkg/connector" log "github.com/conduitio/conduit/pkg/foundation/log" pipeline "github.com/conduitio/conduit/pkg/pipeline" - plugin "github.com/conduitio/conduit/pkg/plugin" + connector0 "github.com/conduitio/conduit/pkg/plugin/connector" processor "github.com/conduitio/conduit/pkg/processor" gomock "go.uber.org/mock/gomock" ) @@ -163,7 +163,7 @@ func (mr *PipelineServiceMockRecorder) RemoveProcessor(arg0, arg1, arg2 any) *go } // Start mocks base method. 
-func (m *PipelineService) Start(arg0 context.Context, arg1 pipeline.ConnectorFetcher, arg2 pipeline.ProcessorFetcher, arg3 pipeline.PluginDispenserFetcher, arg4 string) error { +func (m *PipelineService) Start(arg0 context.Context, arg1 pipeline.ConnectorFetcher, arg2 pipeline.ProcessorService, arg3 pipeline.PluginDispenserFetcher, arg4 string) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Start", arg0, arg1, arg2, arg3, arg4) ret0, _ := ret[0].(error) @@ -399,6 +399,21 @@ func (mr *ProcessorServiceMockRecorder) Get(arg0, arg1 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*ProcessorService)(nil).Get), arg0, arg1) } +// MakeRunnableProcessor mocks base method. +func (m *ProcessorService) MakeRunnableProcessor(arg0 context.Context, arg1 *processor.Instance) (*processor.RunnableProcessor, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "MakeRunnableProcessor", arg0, arg1) + ret0, _ := ret[0].(*processor.RunnableProcessor) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// MakeRunnableProcessor indicates an expected call of MakeRunnableProcessor. +func (mr *ProcessorServiceMockRecorder) MakeRunnableProcessor(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "MakeRunnableProcessor", reflect.TypeOf((*ProcessorService)(nil).MakeRunnableProcessor), arg0, arg1) +} + // Update mocks base method. func (m *ProcessorService) Update(arg0 context.Context, arg1 string, arg2 processor.Config) (*processor.Instance, error) { m.ctrl.T.Helper() @@ -414,40 +429,40 @@ func (mr *ProcessorServiceMockRecorder) Update(arg0, arg1, arg2 any) *gomock.Cal return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Update", reflect.TypeOf((*ProcessorService)(nil).Update), arg0, arg1, arg2) } -// PluginService is a mock of PluginService interface. -type PluginService struct { +// ConnectorPluginService is a mock of ConnectorPluginService interface. 
+type ConnectorPluginService struct { ctrl *gomock.Controller - recorder *PluginServiceMockRecorder + recorder *ConnectorPluginServiceMockRecorder } -// PluginServiceMockRecorder is the mock recorder for PluginService. -type PluginServiceMockRecorder struct { - mock *PluginService +// ConnectorPluginServiceMockRecorder is the mock recorder for ConnectorPluginService. +type ConnectorPluginServiceMockRecorder struct { + mock *ConnectorPluginService } -// NewPluginService creates a new mock instance. -func NewPluginService(ctrl *gomock.Controller) *PluginService { - mock := &PluginService{ctrl: ctrl} - mock.recorder = &PluginServiceMockRecorder{mock} +// NewConnectorPluginService creates a new mock instance. +func NewConnectorPluginService(ctrl *gomock.Controller) *ConnectorPluginService { + mock := &ConnectorPluginService{ctrl: ctrl} + mock.recorder = &ConnectorPluginServiceMockRecorder{mock} return mock } // EXPECT returns an object that allows the caller to indicate expected use. -func (m *PluginService) EXPECT() *PluginServiceMockRecorder { +func (m *ConnectorPluginService) EXPECT() *ConnectorPluginServiceMockRecorder { return m.recorder } // NewDispenser mocks base method. -func (m *PluginService) NewDispenser(arg0 log.CtxLogger, arg1 string) (plugin.Dispenser, error) { +func (m *ConnectorPluginService) NewDispenser(arg0 log.CtxLogger, arg1 string) (connector0.Dispenser, error) { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "NewDispenser", arg0, arg1) - ret0, _ := ret[0].(plugin.Dispenser) + ret0, _ := ret[0].(connector0.Dispenser) ret1, _ := ret[1].(error) return ret0, ret1 } // NewDispenser indicates an expected call of NewDispenser. 
-func (mr *PluginServiceMockRecorder) NewDispenser(arg0, arg1 any) *gomock.Call { +func (mr *ConnectorPluginServiceMockRecorder) NewDispenser(arg0, arg1 any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewDispenser", reflect.TypeOf((*PluginService)(nil).NewDispenser), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "NewDispenser", reflect.TypeOf((*ConnectorPluginService)(nil).NewDispenser), arg0, arg1) } diff --git a/pkg/provisioning/mock/provisioning_util.go b/pkg/provisioning/mock/provisioning_util.go index 5ec9da9c7..6d10605df 100644 --- a/pkg/provisioning/mock/provisioning_util.go +++ b/pkg/provisioning/mock/provisioning_util.go @@ -36,5 +36,5 @@ func (mr *ConnectorServiceMockRecorder) CreateWithInstance(ctx interface{}, inst // CreateWithInstance is a utility function that lets you declare an expected call to // Create with arguments taken from the supplied instance. func (mr *ProcessorServiceMockRecorder) CreateWithInstance(ctx interface{}, instance *processor.Instance) *gomock.Call { - return mr.Create(ctx, instance.ID, instance.Type, instance.Parent, instance.Config, instance.ProvisionedBy, instance.Condition) + return mr.Create(ctx, instance.ID, instance.Plugin, instance.Parent, instance.Config, instance.ProvisionedBy, instance.Condition) } diff --git a/pkg/provisioning/service.go b/pkg/provisioning/service.go index c49260135..f63222ce6 100644 --- a/pkg/provisioning/service.go +++ b/pkg/provisioning/service.go @@ -34,14 +34,14 @@ import ( ) type Service struct { - db database.DB - logger log.CtxLogger - parser config.Parser - pipelineService PipelineService - connectorService ConnectorService - processorService ProcessorService - pluginService PluginService - pipelinesPath string + db database.DB + logger log.CtxLogger + parser config.Parser + pipelineService PipelineService + connectorService ConnectorService + processorService ProcessorService + connectorPluginService 
ConnectorPluginService + pipelinesPath string } func NewService( @@ -50,18 +50,18 @@ func NewService( plService PipelineService, connService ConnectorService, procService ProcessorService, - pluginService PluginService, + connPluginService ConnectorPluginService, pipelinesDir string, ) *Service { return &Service{ - db: db, - logger: logger.WithComponent("provisioning.Service"), - parser: yaml.NewParser(logger), - pipelineService: plService, - connectorService: connService, - processorService: procService, - pluginService: pluginService, - pipelinesPath: pipelinesDir, + db: db, + logger: logger.WithComponent("provisioning.Service"), + parser: yaml.NewParser(logger), + pipelineService: plService, + connectorService: connService, + processorService: procService, + connectorPluginService: connPluginService, + pipelinesPath: pipelinesDir, } } @@ -247,7 +247,7 @@ func (s *Service) provisionPipeline(ctx context.Context, cfg config.Pipeline) er // check if pipeline should be running if cfg.Status == config.StatusRunning { // TODO set status and let the pipeline service start it - err := s.pipelineService.Start(ctx, s.connectorService, s.processorService, s.pluginService, cfg.ID) + err := s.pipelineService.Start(ctx, s.connectorService, s.processorService, s.connectorPluginService, cfg.ID) if err != nil { return cerrors.Errorf("could not start the pipeline %q: %w", cfg.ID, err) } diff --git a/pkg/provisioning/service_test.go b/pkg/provisioning/service_test.go index f036b9709..c0be2d6d3 100644 --- a/pkg/provisioning/service_test.go +++ b/pkg/provisioning/service_test.go @@ -20,22 +20,26 @@ import ( "testing" "time" + "github.com/conduitio/conduit-commons/opencdc" + sdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/connector" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/ctxutil" "github.com/conduitio/conduit/pkg/foundation/database/badger" "github.com/conduitio/conduit/pkg/foundation/log" 
"github.com/conduitio/conduit/pkg/pipeline" - "github.com/conduitio/conduit/pkg/plugin" - "github.com/conduitio/conduit/pkg/plugin/builtin" - "github.com/conduitio/conduit/pkg/plugin/standalone" + conn_plugin "github.com/conduitio/conduit/pkg/plugin/connector" + "github.com/conduitio/conduit/pkg/plugin/connector/builtin" + "github.com/conduitio/conduit/pkg/plugin/connector/standalone" "github.com/conduitio/conduit/pkg/processor" - "github.com/conduitio/conduit/pkg/processor/procbuiltin" + proc_mock "github.com/conduitio/conduit/pkg/processor/mock" p1 "github.com/conduitio/conduit/pkg/provisioning/test/pipelines1" p2 "github.com/conduitio/conduit/pkg/provisioning/test/pipelines2" p3 "github.com/conduitio/conduit/pkg/provisioning/test/pipelines3" p4 "github.com/conduitio/conduit/pkg/provisioning/test/pipelines4-integration-test" "github.com/conduitio/conduit/pkg/record" + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" "github.com/matryer/is" "github.com/rs/zerolog" "go.uber.org/mock/gomock" @@ -89,7 +93,7 @@ var ( Type: processor.ParentTypePipeline, }, Config: processor.Config{Settings: map[string]string{}}, - Type: "js", + Plugin: "js", } oldConnectorProcessorInstance = &processor.Instance{ ID: "pipeline1:con2:proc1con", @@ -98,7 +102,7 @@ var ( Type: processor.ParentTypeConnector, }, Config: processor.Config{Settings: map[string]string{}}, - Type: "js", + Plugin: "js", } ) @@ -483,7 +487,7 @@ func TestService_IntegrationTestServices(t *testing.T) { is.NoErr(err) }) - pluginService := plugin.NewService( + connPluginService := conn_plugin.NewPluginService( logger, builtin.NewRegistry(logger, builtin.DefaultDispenserFactories), standalone.NewRegistry(logger, ""), @@ -491,17 +495,18 @@ func TestService_IntegrationTestServices(t *testing.T) { plService := pipeline.NewService(logger, db) connService := connector.NewService(logger, db, connector.NewPersister(logger, db, time.Second, 3)) - procService := processor.NewService(logger, db, 
processor.GlobalBuilderRegistry) - - // add builtin processor for removing metadata - // TODO at the time of writing we don't have a processor for manipulating - // metadata, once we have it we can use it instead of adding our own - processor.GlobalBuilderRegistry.MustRegister("removereadat", func(config processor.Config) (processor.Interface, error) { - return procbuiltin.NewFuncWrapper(func(ctx context.Context, r record.Record) (record.Record, error) { - delete(r.Metadata, record.MetadataReadAt) // read at is different every time, remove it - return r, nil - }), nil - }) + + procPluginService := proc_mock.NewPluginService(gomock.NewController(t)) + procPluginService.EXPECT(). + NewProcessor(gomock.Any(), "removereadat", gomock.Any()). + Return( + sdk.NewProcessorFunc(sdk.Specification{Name: "removereadat"}, func(ctx context.Context, r opencdc.Record) (opencdc.Record, error) { + delete(r.Metadata, record.MetadataReadAt) // read at is different every time, remove it + return r, nil + }), + nil, + ).AnyTimes() + procService := processor.NewService(logger, db, procPluginService) // create destination file destFile := "./test/dest-file.txt" @@ -512,7 +517,7 @@ func TestService_IntegrationTestServices(t *testing.T) { is.NoErr(err) }) - service := NewService(db, logger, plService, connService, procService, pluginService, "./test/pipelines4-integration-test") + service := NewService(db, logger, plService, connService, procService, connPluginService, "./test/pipelines4-integration-test") err = service.Init(context.Background()) is.NoErr(err) @@ -539,8 +544,10 @@ func TestService_IntegrationTestServices(t *testing.T) { is.NoErr(err) want.CreatedAt = got.CreatedAt want.UpdatedAt = got.UpdatedAt - got.Processor = nil // processor is a function and can't be compared - is.Equal(got, want) + diff := cmp.Diff(got, want, cmpopts.IgnoreUnexported(processor.Instance{})) + if diff != "" { + t.Errorf("mismatch (-want +got): %s", diff) + } } // checking connectors diff --git 
a/pkg/provisioning/test/pipelines1/pipelines.go b/pkg/provisioning/test/pipelines1/pipelines.go index 48fbfef55..f912be4d5 100644 --- a/pkg/provisioning/test/pipelines1/pipelines.go +++ b/pkg/provisioning/test/pipelines1/pipelines.go @@ -77,8 +77,8 @@ var P1C2 = &connector.Instance{ } var P1P1 = &processor.Instance{ - ID: "pipeline1:proc1", - Type: "js", + ID: "pipeline1:proc1", + Plugin: "js", Parent: processor.Parent{ ID: "pipeline1", Type: processor.ParentTypePipeline, @@ -87,7 +87,6 @@ var P1P1 = &processor.Instance{ Workers: 1, Settings: map[string]string{"additionalProp1": "string"}, }, - Processor: nil, ProvisionedBy: processor.ProvisionTypeConfig, CreatedAt: time.Now(), @@ -95,8 +94,8 @@ var P1P1 = &processor.Instance{ } var P1C2P1 = &processor.Instance{ - ID: "pipeline1:con2:proc1con", - Type: "js", + ID: "pipeline1:con2:proc1con", + Plugin: "js", Parent: processor.Parent{ ID: "pipeline1:con2", Type: processor.ParentTypeConnector, @@ -105,7 +104,6 @@ var P1C2P1 = &processor.Instance{ Workers: 10, Settings: map[string]string{"additionalProp1": "string"}, }, - Processor: nil, ProvisionedBy: processor.ProvisionTypeConfig, CreatedAt: time.Now(), diff --git a/pkg/provisioning/test/pipelines1/pipelines.yml b/pkg/provisioning/test/pipelines1/pipelines.yml index afb050863..2df6cc0ae 100644 --- a/pkg/provisioning/test/pipelines1/pipelines.yml +++ b/pkg/provisioning/test/pipelines1/pipelines.yml @@ -21,13 +21,13 @@ pipelines: path: my/path/file2.txt processors: - id: proc1con - type: js + plugin: js workers: 10 settings: additionalProp1: string processors: - id: proc1 - type: js + plugin: js settings: additionalProp1: string dead-letter-queue: diff --git a/pkg/provisioning/test/pipelines4-integration-test/pipelines.go b/pkg/provisioning/test/pipelines4-integration-test/pipelines.go index 8402990d3..78540ea20 100644 --- a/pkg/provisioning/test/pipelines4-integration-test/pipelines.go +++ b/pkg/provisioning/test/pipelines4-integration-test/pipelines.go @@ -77,8 +77,8 
@@ var P1C2 = &connector.Instance{ } var P1P1 = &processor.Instance{ - ID: "pipeline1:proc1", - Type: "removereadat", + ID: "pipeline1:proc1", + Plugin: "removereadat", Parent: processor.Parent{ ID: "pipeline1", Type: processor.ParentTypePipeline, @@ -87,7 +87,6 @@ var P1P1 = &processor.Instance{ Settings: nil, Workers: 1, }, - Processor: nil, ProvisionedBy: processor.ProvisionTypeConfig, CreatedAt: time.Now(), @@ -95,8 +94,8 @@ var P1P1 = &processor.Instance{ } var P1C2P1 = &processor.Instance{ - ID: "pipeline1:con2:con2proc1", - Type: "removereadat", + ID: "pipeline1:con2:con2proc1", + Plugin: "removereadat", Parent: processor.Parent{ ID: "pipeline1:con2", Type: processor.ParentTypeConnector, @@ -105,7 +104,6 @@ var P1C2P1 = &processor.Instance{ Settings: nil, Workers: 1, }, - Processor: nil, ProvisionedBy: processor.ProvisionTypeConfig, CreatedAt: time.Now(), diff --git a/pkg/provisioning/test/pipelines4-integration-test/pipelines.yml b/pkg/provisioning/test/pipelines4-integration-test/pipelines.yml index 697cb815c..1c7fd9b98 100644 --- a/pkg/provisioning/test/pipelines4-integration-test/pipelines.yml +++ b/pkg/provisioning/test/pipelines4-integration-test/pipelines.yml @@ -22,10 +22,10 @@ pipelines: path: ./test/dest-file.txt processors: - id: con2proc1 - type: removereadat + plugin: removereadat processors: - id: proc1 - type: removereadat + plugin: removereadat --- version: 2.0 pipelines: diff --git a/pkg/record/record.go b/pkg/record/record.go index 8b88aaf47..f5ff579c6 100644 --- a/pkg/record/record.go +++ b/pkg/record/record.go @@ -23,6 +23,7 @@ import ( "strconv" "strings" + "github.com/conduitio/conduit-commons/opencdc" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/record/schema" "github.com/goccy/go-json" @@ -186,6 +187,33 @@ func (r Record) Clone() Record { return clone } +func (r Record) ToOpenCDC() opencdc.Record { + return opencdc.Record{ + Position: opencdc.Position(r.Position), + Operation: 
opencdc.Operation(r.Operation), + Metadata: opencdc.Metadata(r.Metadata), + Key: toOpenCDCData(r.Key), + Payload: opencdc.Change{ + Before: toOpenCDCData(r.Payload.Before), + After: toOpenCDCData(r.Payload.After), + }, + } +} + +func toOpenCDCData(data Data) opencdc.Data { + switch v := data.(type) { + case nil: + return nil + case RawData: + return opencdc.RawData(v.Bytes()) + case StructuredData: + return opencdc.StructuredData(v) + default: + // this shouldn't happen, we hope + panic(cerrors.Errorf("unrecognized data type: %T", v)) + } +} + type Metadata map[string]string type Change struct { @@ -271,3 +299,30 @@ func (d RawData) Clone() Data { Schema: d.Schema, // this field is currently unused, we don't care about cloning it atm } } + +func FromOpenCDC(in opencdc.Record) Record { + return Record{ + Position: Position(in.Position), + Operation: Operation(in.Operation), + Metadata: Metadata(in.Metadata), + Key: fromOpenCDCData(in.Key), + Payload: Change{ + Before: fromOpenCDCData(in.Payload.Before), + After: fromOpenCDCData(in.Payload.After), + }, + } +} + +func fromOpenCDCData(data opencdc.Data) Data { + switch v := data.(type) { + case nil: + return nil + case opencdc.RawData: + return RawData{Raw: v.Bytes()} + case opencdc.StructuredData: + return StructuredData(v) + default: + // this shouldn't happen, we hope + panic(cerrors.Errorf("unrecognized data type: %T", v)) + } +} diff --git a/pkg/record/record_conversions_test.go b/pkg/record/record_conversions_test.go new file mode 100644 index 000000000..ad92ab19c --- /dev/null +++ b/pkg/record/record_conversions_test.go @@ -0,0 +1,300 @@ +// Copyright © 2022 Meroxa, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package record + +import ( + "fmt" + "testing" + + "github.com/conduitio/conduit-commons/opencdc" + "github.com/matryer/is" +) + +type testCase[C, O any] struct { + name string + conduitType C + opencdcType O +} + +func TestRecord_ToOpenCDC_Keys(t *testing.T) { + testCases := dataTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + underTest := Record{Key: tc.conduitType} + got := underTest.ToOpenCDC() + is.Equal(tc.opencdcType, got.Key) + }) + } +} + +func TestRecord_ToOpenCDC_PayloadBefore(t *testing.T) { + testCases := dataTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + underTest := Record{ + Payload: Change{Before: tc.conduitType}, + } + got := underTest.ToOpenCDC() + is.Equal(tc.opencdcType, got.Payload.Before) + }) + } +} + +func TestRecord_ToOpenCDC_PayloadAfter(t *testing.T) { + testCases := dataTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + underTest := Record{ + Payload: Change{After: tc.conduitType}, + } + got := underTest.ToOpenCDC() + is.Equal(tc.opencdcType, got.Payload.After) + }) + } +} + +func TestRecord_ToOpenCDC_Position(t *testing.T) { + testCases := positionTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + underTest := Record{Position: tc.conduitType} + got := underTest.ToOpenCDC() + is.Equal(tc.opencdcType, got.Position) + }) + } +} + +func TestRecord_ToOpenCDC_Metadata(t *testing.T) { + testCases 
:= metadataTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + underTest := Record{Metadata: tc.conduitType} + got := underTest.ToOpenCDC() + is.Equal(tc.opencdcType, got.Metadata) + }) + } +} + +func TestRecord_ToOpenCDC_Operation(t *testing.T) { + testCases := operationTestCases() + + for _, tc := range testCases { + t.Run(fmt.Sprintf("%v", tc.conduitType), func(t *testing.T) { + is := is.New(t) + + underTest := Record{Operation: tc.conduitType} + got := underTest.ToOpenCDC() + is.Equal(tc.opencdcType, got.Operation) + }) + } +} + +func TestRecord_FromOpenCDC_Keys(t *testing.T) { + testCases := dataTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + got := FromOpenCDC(opencdc.Record{Key: tc.opencdcType}) + is.Equal(tc.conduitType, got.Key) + }) + } +} + +func TestRecord_FromOpenCDC_PayloadBefore(t *testing.T) { + testCases := dataTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + got := FromOpenCDC(opencdc.Record{ + Payload: opencdc.Change{ + Before: tc.opencdcType, + }, + }) + is.Equal(tc.conduitType, got.Payload.Before) + }) + } +} + +func TestRecord_FromOpenCDC_PayloadAfter(t *testing.T) { + testCases := dataTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + got := FromOpenCDC(opencdc.Record{ + Payload: opencdc.Change{ + After: tc.opencdcType, + }, + }) + is.Equal(tc.conduitType, got.Payload.After) + }) + } +} + +func TestRecord_FromOpenCDC_Position(t *testing.T) { + testCases := positionTestCases() + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + got := FromOpenCDC(opencdc.Record{ + Position: tc.opencdcType, + }) + is.Equal(tc.conduitType, got.Position) + }) + } +} + +func TestRecord_FromOpenCDC_Metadata(t *testing.T) { + testCases := metadataTestCases() + + for _, tc := range testCases { + 
t.Run(tc.name, func(t *testing.T) { + is := is.New(t) + + got := FromOpenCDC(opencdc.Record{Metadata: tc.opencdcType}) + is.Equal(tc.conduitType, got.Metadata) + }) + } +} + +func TestRecord_FromOpenCDC_Operation(t *testing.T) { + testCases := operationTestCases() + + for _, tc := range testCases { + t.Run(fmt.Sprintf("%v", tc.conduitType), func(t *testing.T) { + is := is.New(t) + + underTest := Record{Operation: tc.conduitType} + got := underTest.ToOpenCDC() + is.Equal(tc.opencdcType, got.Operation) + }) + } +} + +func positionTestCases() []testCase[Position, opencdc.Position] { + return []testCase[Position, opencdc.Position]{ + { + name: "nil", + }, + { + name: "raw", + opencdcType: opencdc.Position("raw, uncooked data"), + conduitType: Position("raw, uncooked data"), + }, + } +} + +func dataTestCases() []testCase[Data, opencdc.Data] { + return []testCase[Data, opencdc.Data]{ + { + name: "nil", + conduitType: nil, + opencdcType: nil, + }, + { + name: "raw", + conduitType: RawData{Raw: []byte("raw, uncooked data")}, + opencdcType: opencdc.RawData("raw, uncooked data"), + }, + { + name: "structured", + conduitType: StructuredData{ + "key1": "string-value", + "key2": 123, + "key3": []int{4, 5, 6}, + "key4": map[string]interface{}{ + "letters": "abc", + }, + }, + opencdcType: opencdc.StructuredData{ + "key1": "string-value", + "key2": 123, + "key3": []int{4, 5, 6}, + "key4": map[string]interface{}{ + "letters": "abc", + }, + }, + }, + } +} + +func operationTestCases() []testCase[Operation, opencdc.Operation] { + return []testCase[Operation, opencdc.Operation]{ + { + conduitType: OperationCreate, + opencdcType: opencdc.OperationCreate, + }, + { + conduitType: OperationSnapshot, + opencdcType: opencdc.OperationSnapshot, + }, + { + conduitType: OperationUpdate, + opencdcType: opencdc.OperationUpdate, + }, + { + conduitType: OperationDelete, + opencdcType: opencdc.OperationDelete, + }, + } +} + +func metadataTestCases() []testCase[Metadata, opencdc.Metadata] { + return 
[]testCase[Metadata, opencdc.Metadata]{ + { + name: "nil", + }, + { + name: "empty", + conduitType: Metadata{}, + opencdcType: opencdc.Metadata{}, + }, + { + name: "non-empty", + conduitType: Metadata{ + "k": "v", + MetadataOpenCDCVersion: OpenCDCVersion, + MetadataConduitSourcePluginName: "file", + }, + opencdcType: opencdc.Metadata{ + "k": "v", + opencdc.MetadataOpenCDCVersion: opencdc.OpenCDCVersion, + opencdc.MetadataConduitSourcePluginName: "file", + }, + }, + } +} diff --git a/pkg/web/api/connector_v1.go b/pkg/web/api/connector_v1.go index 077a8398f..905c0fd02 100644 --- a/pkg/web/api/connector_v1.go +++ b/pkg/web/api/connector_v1.go @@ -14,10 +14,15 @@ //go:generate mockgen -destination=mock/connector.go -package=mock -mock_names=ConnectorOrchestrator=ConnectorOrchestrator . ConnectorOrchestrator //go:generate mockgen -destination=mock/connector_service.go -package=mock -mock_names=ConnectorService_InspectConnectorServer=ConnectorService_InspectConnectorServer github.com/conduitio/conduit/proto/api/v1 ConnectorService_InspectConnectorServer +//go:generate mockgen -destination=mock/connector_plugin.go -package=mock -mock_names=ConnectorPluginOrchestrator=ConnectorPluginOrchestrator . ConnectorPluginOrchestrator + package api import ( "context" + "regexp" + + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/connector" "github.com/conduitio/conduit/pkg/foundation/cerrors" @@ -39,13 +44,25 @@ type ConnectorOrchestrator interface { Inspect(ctx context.Context, id string) (*inspector.Session, error) } +type ConnectorPluginOrchestrator interface { + // List will return all connector plugins' specs. 
+ List(ctx context.Context) (map[string]connectorPlugin.Specification, error) +} + type ConnectorAPIv1 struct { apiv1.UnimplementedConnectorServiceServer - cs ConnectorOrchestrator + connectorOrchestrator ConnectorOrchestrator + connectorPluginOrchestrator ConnectorPluginOrchestrator } -func NewConnectorAPIv1(cs ConnectorOrchestrator) *ConnectorAPIv1 { - return &ConnectorAPIv1{cs: cs} +func NewConnectorAPIv1( + co ConnectorOrchestrator, + cpo ConnectorPluginOrchestrator, +) *ConnectorAPIv1 { + return &ConnectorAPIv1{ + connectorOrchestrator: co, + connectorPluginOrchestrator: cpo, + } } func (c *ConnectorAPIv1) Register(srv *grpc.Server) { @@ -57,7 +74,7 @@ func (c *ConnectorAPIv1) ListConnectors( req *apiv1.ListConnectorsRequest, ) (*apiv1.ListConnectorsResponse, error) { // TODO: Implement filtering and limiting. - list := c.cs.List(ctx) + list := c.connectorOrchestrator.List(ctx) var clist []*apiv1.Connector for _, v := range list { if req.PipelineId == "" || req.PipelineId == v.PipelineID { @@ -73,7 +90,7 @@ func (c *ConnectorAPIv1) InspectConnector(req *apiv1.InspectConnectorRequest, se return status.ConnectorError(cerrors.ErrEmptyID) } - session, err := c.cs.Inspect(server.Context(), req.Id) + session, err := c.connectorOrchestrator.Inspect(server.Context(), req.Id) if err != nil { return status.ConnectorError(cerrors.Errorf("failed to inspect connector: %w", err)) } @@ -105,7 +122,7 @@ func (c *ConnectorAPIv1) GetConnector( } // fetch the connector from the ConnectorOrchestrator - pr, err := c.cs.Get(ctx, req.Id) + pr, err := c.connectorOrchestrator.Get(ctx, req.Id) if err != nil { return nil, status.ConnectorError(cerrors.Errorf("failed to get connector by ID: %w", err)) } @@ -121,7 +138,7 @@ func (c *ConnectorAPIv1) CreateConnector( ctx context.Context, req *apiv1.CreateConnectorRequest, ) (*apiv1.CreateConnectorResponse, error) { - created, err := c.cs.Create( + created, err := c.connectorOrchestrator.Create( ctx, fromproto.ConnectorType(req.Type), 
req.Plugin, @@ -145,7 +162,7 @@ func (c *ConnectorAPIv1) UpdateConnector( return nil, cerrors.ErrEmptyID } - updated, err := c.cs.Update(ctx, req.Id, fromproto.ConnectorConfig(req.Config)) + updated, err := c.connectorOrchestrator.Update(ctx, req.Id, fromproto.ConnectorConfig(req.Config)) if err != nil { return nil, status.ConnectorError(cerrors.Errorf("failed to update connector: %w", err)) @@ -157,7 +174,7 @@ func (c *ConnectorAPIv1) UpdateConnector( } func (c *ConnectorAPIv1) DeleteConnector(ctx context.Context, req *apiv1.DeleteConnectorRequest) (*apiv1.DeleteConnectorResponse, error) { - err := c.cs.Delete(ctx, req.Id) + err := c.connectorOrchestrator.Delete(ctx, req.Id) if err != nil { return nil, status.ConnectorError(cerrors.Errorf("failed to delete connector: %w", err)) @@ -172,7 +189,7 @@ func (c *ConnectorAPIv1) ValidateConnector( ctx context.Context, req *apiv1.ValidateConnectorRequest, ) (*apiv1.ValidateConnectorResponse, error) { - err := c.cs.Validate( + err := c.connectorOrchestrator.Validate( ctx, fromproto.ConnectorType(req.Type), req.Plugin, @@ -185,3 +202,32 @@ func (c *ConnectorAPIv1) ValidateConnector( return &apiv1.ValidateConnectorResponse{}, nil } + +func (c *ConnectorAPIv1) ListConnectorPlugins( + ctx context.Context, + req *apiv1.ListConnectorPluginsRequest, +) (*apiv1.ListConnectorPluginsResponse, error) { + var nameFilter *regexp.Regexp + if req.GetName() != "" { + var err error + nameFilter, err = regexp.Compile("^" + req.GetName() + "$") + if err != nil { + return nil, status.PluginError(cerrors.New("invalid name regex")) + } + } + + mp, err := c.connectorPluginOrchestrator.List(ctx) + if err != nil { + return nil, status.PluginError(err) + } + var plist []*apiv1.ConnectorPluginSpecifications + + for name, v := range mp { + if nameFilter != nil && !nameFilter.MatchString(name) { + continue // don't add to result list, filter didn't match + } + plist = append(plist, toproto.ConnectorPluginSpecifications(name, v)) + } + + return 
&apiv1.ListConnectorPluginsResponse{Plugins: plist}, nil +} diff --git a/pkg/web/api/connector_v1_test.go b/pkg/web/api/connector_v1_test.go index 01d3749cf..afe5f7f56 100644 --- a/pkg/web/api/connector_v1_test.go +++ b/pkg/web/api/connector_v1_test.go @@ -20,6 +20,8 @@ import ( "testing" "time" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" + "github.com/conduitio/conduit/pkg/connector" "github.com/conduitio/conduit/pkg/foundation/cchan" "github.com/conduitio/conduit/pkg/foundation/cerrors" @@ -41,7 +43,7 @@ func TestConnectorAPIv1_ListConnectors(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) source := newTestSource() destination := newTestDestination() @@ -118,7 +120,7 @@ func TestConnectorAPIv1_ListConnectorsByPipeline(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) source := newTestSource() destination := newTestDestination() @@ -171,7 +173,7 @@ func TestConnectorAPIv1_CreateConnector(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) source := newTestSource() @@ -221,7 +223,7 @@ func TestConnectorAPIv1_InspectConnector_SendRecord(t *testing.T) { defer cancel() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) id := uuid.NewString() rec := generateTestRecord() @@ -258,7 +260,7 @@ func TestConnectorAPIv1_InspectConnector_SendErr(t *testing.T) { defer cancel() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := 
NewConnectorAPIv1(csMock, nil) id := uuid.NewString() ins := inspector.New(log.Nop(), 10) @@ -297,7 +299,7 @@ func TestConnectorAPIv1_InspectConnector_Err(t *testing.T) { defer cancel() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) id := uuid.NewString() err := cerrors.New("not found, sorry") @@ -338,7 +340,7 @@ func TestConnectorAPIv1_GetConnector(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) source := newTestSource() @@ -384,7 +386,7 @@ func TestConnectorAPIv1_UpdateConnector(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) before := newTestSource() after := newTestSource() @@ -437,7 +439,7 @@ func TestConnectorAPIv1_DeleteConnector(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) id := uuid.NewString() @@ -462,7 +464,7 @@ func TestConnectorAPIv1_ValidateConnector(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) config := connector.Config{ Name: "A source connector", @@ -496,7 +498,7 @@ func TestConnectorAPIv1_ValidateConnectorError(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) csMock := apimock.NewConnectorOrchestrator(ctrl) - api := NewConnectorAPIv1(csMock) + api := NewConnectorAPIv1(csMock, nil) config := connector.Config{ Name: "A source connector", @@ -522,6 +524,69 @@ func TestConnectorAPIv1_ValidateConnectorError(t 
*testing.T) { is.True(err != nil) } +func TestConnectorAPIv1_ListConnectorPluginsByName(t *testing.T) { + is := is.New(t) + + ctx := context.Background() + ctrl := gomock.NewController(t) + cpoMock := apimock.NewConnectorPluginOrchestrator(ctrl) + api := NewConnectorAPIv1(nil, cpoMock) + + names := []string{"do-not-want-this-plugin", "want-p1", "want-p2", "skip", "another-skipped"} + + plsMap := make(map[string]connectorPlugin.Specification) + pls := make([]connectorPlugin.Specification, 0) + + for _, name := range names { + ps := connectorPlugin.Specification{ + Name: name, + Description: "desc", + Version: "v1.0", + Author: "Aaron", + SourceParams: map[string]connectorPlugin.Parameter{ + "param": { + Type: connectorPlugin.ParameterTypeString, + Validations: []connectorPlugin.Validation{{ + Type: connectorPlugin.ValidationTypeRequired, + }}, + }, + }, + DestinationParams: map[string]connectorPlugin.Parameter{}, + } + pls = append(pls, ps) + plsMap[name] = ps + } + + cpoMock.EXPECT(). + List(ctx). + Return(plsMap, nil). + Times(1) + + want := &apiv1.ListConnectorPluginsResponse{ + Plugins: []*apiv1.ConnectorPluginSpecifications{ + toproto.ConnectorPluginSpecifications(pls[1].Name, pls[1]), + toproto.ConnectorPluginSpecifications(pls[2].Name, pls[2]), + }, + } + + got, err := api.ListConnectorPlugins( + ctx, + &apiv1.ListConnectorPluginsRequest{Name: "want-.*"}, + ) + + is.NoErr(err) + + sortPlugins := func(p []*apiv1.ConnectorPluginSpecifications) { + sort.Slice(p, func(i, j int) bool { + return p[i].Name < p[j].Name + }) + } + + sortPlugins(want.Plugins) + sortPlugins(got.Plugins) + is.Equal(want, got) +} + func sortConnectors(c []*apiv1.Connector) { sort.Slice(c, func(i, j int) bool { return c[i].Id < c[j].Id diff --git a/pkg/web/api/mock/connector_plugin.go b/pkg/web/api/mock/connector_plugin.go new file mode 100644 index 000000000..a8a356d92 --- /dev/null +++ b/pkg/web/api/mock/connector_plugin.go @@ -0,0 +1,56 @@ +// Code generated by MockGen. DO NOT EDIT. 
+// Source: github.com/conduitio/conduit/pkg/web/api (interfaces: ConnectorPluginOrchestrator) +// +// Generated by this command: +// +// mockgen -destination=mock/connector_plugin.go -package=mock -mock_names=ConnectorPluginOrchestrator=ConnectorPluginOrchestrator . ConnectorPluginOrchestrator +// + +// Package mock is a generated GoMock package. +package mock + +import ( + context "context" + reflect "reflect" + + connector "github.com/conduitio/conduit/pkg/plugin/connector" + gomock "go.uber.org/mock/gomock" +) + +// ConnectorPluginOrchestrator is a mock of ConnectorPluginOrchestrator interface. +type ConnectorPluginOrchestrator struct { + ctrl *gomock.Controller + recorder *ConnectorPluginOrchestratorMockRecorder +} + +// ConnectorPluginOrchestratorMockRecorder is the mock recorder for ConnectorPluginOrchestrator. +type ConnectorPluginOrchestratorMockRecorder struct { + mock *ConnectorPluginOrchestrator +} + +// NewConnectorPluginOrchestrator creates a new mock instance. +func NewConnectorPluginOrchestrator(ctrl *gomock.Controller) *ConnectorPluginOrchestrator { + mock := &ConnectorPluginOrchestrator{ctrl: ctrl} + mock.recorder = &ConnectorPluginOrchestratorMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *ConnectorPluginOrchestrator) EXPECT() *ConnectorPluginOrchestratorMockRecorder { + return m.recorder +} + +// List mocks base method. +func (m *ConnectorPluginOrchestrator) List(arg0 context.Context) (map[string]connector.Specification, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "List", arg0) + ret0, _ := ret[0].(map[string]connector.Specification) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// List indicates an expected call of List. 
+func (mr *ConnectorPluginOrchestratorMockRecorder) List(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*ConnectorPluginOrchestrator)(nil).List), arg0) +} diff --git a/pkg/web/api/mock/plugin.go b/pkg/web/api/mock/plugin.go deleted file mode 100644 index 8e2496980..000000000 --- a/pkg/web/api/mock/plugin.go +++ /dev/null @@ -1,56 +0,0 @@ -// Code generated by MockGen. DO NOT EDIT. -// Source: github.com/conduitio/conduit/pkg/web/api (interfaces: PluginOrchestrator) -// -// Generated by this command: -// -// mockgen -destination=mock/plugin.go -package=mock -mock_names=PluginOrchestrator=PluginOrchestrator . PluginOrchestrator -// - -// Package mock is a generated GoMock package. -package mock - -import ( - context "context" - reflect "reflect" - - plugin "github.com/conduitio/conduit/pkg/plugin" - gomock "go.uber.org/mock/gomock" -) - -// PluginOrchestrator is a mock of PluginOrchestrator interface. -type PluginOrchestrator struct { - ctrl *gomock.Controller - recorder *PluginOrchestratorMockRecorder -} - -// PluginOrchestratorMockRecorder is the mock recorder for PluginOrchestrator. -type PluginOrchestratorMockRecorder struct { - mock *PluginOrchestrator -} - -// NewPluginOrchestrator creates a new mock instance. -func NewPluginOrchestrator(ctrl *gomock.Controller) *PluginOrchestrator { - mock := &PluginOrchestrator{ctrl: ctrl} - mock.recorder = &PluginOrchestratorMockRecorder{mock} - return mock -} - -// EXPECT returns an object that allows the caller to indicate expected use. -func (m *PluginOrchestrator) EXPECT() *PluginOrchestratorMockRecorder { - return m.recorder -} - -// List mocks base method. 
-func (m *PluginOrchestrator) List(arg0 context.Context) (map[string]plugin.Specification, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "List", arg0) - ret0, _ := ret[0].(map[string]plugin.Specification) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// List indicates an expected call of List. -func (mr *PluginOrchestratorMockRecorder) List(arg0 any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*PluginOrchestrator)(nil).List), arg0) -} diff --git a/pkg/web/api/mock/processor_plugin.go b/pkg/web/api/mock/processor_plugin.go new file mode 100644 index 000000000..565bfc55d --- /dev/null +++ b/pkg/web/api/mock/processor_plugin.go @@ -0,0 +1,56 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/conduitio/conduit/pkg/web/api (interfaces: ProcessorPluginOrchestrator) +// +// Generated by this command: +// +// mockgen -destination=mock/processor_plugin.go -package=mock -mock_names=ProcessorPluginOrchestrator=ProcessorPluginOrchestrator . ProcessorPluginOrchestrator +// + +// Package mock is a generated GoMock package. +package mock + +import ( + context "context" + reflect "reflect" + + sdk "github.com/conduitio/conduit-processor-sdk" + gomock "go.uber.org/mock/gomock" +) + +// ProcessorPluginOrchestrator is a mock of ProcessorPluginOrchestrator interface. +type ProcessorPluginOrchestrator struct { + ctrl *gomock.Controller + recorder *ProcessorPluginOrchestratorMockRecorder +} + +// ProcessorPluginOrchestratorMockRecorder is the mock recorder for ProcessorPluginOrchestrator. +type ProcessorPluginOrchestratorMockRecorder struct { + mock *ProcessorPluginOrchestrator +} + +// NewProcessorPluginOrchestrator creates a new mock instance. 
+func NewProcessorPluginOrchestrator(ctrl *gomock.Controller) *ProcessorPluginOrchestrator { + mock := &ProcessorPluginOrchestrator{ctrl: ctrl} + mock.recorder = &ProcessorPluginOrchestratorMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *ProcessorPluginOrchestrator) EXPECT() *ProcessorPluginOrchestratorMockRecorder { + return m.recorder +} + +// List mocks base method. +func (m *ProcessorPluginOrchestrator) List(arg0 context.Context) (map[string]sdk.Specification, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "List", arg0) + ret0, _ := ret[0].(map[string]sdk.Specification) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// List indicates an expected call of List. +func (mr *ProcessorPluginOrchestratorMockRecorder) List(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*ProcessorPluginOrchestrator)(nil).List), arg0) +} diff --git a/pkg/web/api/plugin_v1.go b/pkg/web/api/plugin_v1.go index afb131746..cbbd4af7b 100644 --- a/pkg/web/api/plugin_v1.go +++ b/pkg/web/api/plugin_v1.go @@ -19,34 +19,29 @@ import ( "regexp" "github.com/conduitio/conduit/pkg/foundation/cerrors" - "github.com/conduitio/conduit/pkg/plugin" "github.com/conduitio/conduit/pkg/web/api/status" "github.com/conduitio/conduit/pkg/web/api/toproto" apiv1 "github.com/conduitio/conduit/proto/api/v1" "google.golang.org/grpc" ) -//go:generate mockgen -destination=mock/plugin.go -package=mock -mock_names=PluginOrchestrator=PluginOrchestrator . PluginOrchestrator - -// PluginOrchestrator defines a CRUD interface that manages the Plugin resource. -type PluginOrchestrator interface { - // List will return all plugins' specs. 
- List(ctx context.Context) (map[string]plugin.Specification, error) -} - type PluginAPIv1 struct { apiv1.UnimplementedPluginServiceServer - ps PluginOrchestrator + connectorPluginOrchestrator ConnectorPluginOrchestrator } -func NewPluginAPIv1(ps PluginOrchestrator) *PluginAPIv1 { - return &PluginAPIv1{ps: ps} +func NewPluginAPIv1( + cpo ConnectorPluginOrchestrator, +) *PluginAPIv1 { + return &PluginAPIv1{connectorPluginOrchestrator: cpo} } func (p *PluginAPIv1) Register(srv *grpc.Server) { apiv1.RegisterPluginServiceServer(srv, p) } +// Deprecated: this is here for backwards compatibility with the old plugin API. +// Use ListConnectorPlugins instead. func (p *PluginAPIv1) ListPlugins( ctx context.Context, req *apiv1.ListPluginsRequest, @@ -60,7 +55,7 @@ func (p *PluginAPIv1) ListPlugins( } } - mp, err := p.ps.List(ctx) + mp, err := p.connectorPluginOrchestrator.List(ctx) if err != nil { return nil, status.PluginError(err) } @@ -70,7 +65,7 @@ func (p *PluginAPIv1) ListPlugins( if nameFilter != nil && !nameFilter.MatchString(name) { continue // don't add to result list, filter didn't match } - plist = append(plist, toproto.Plugin(name, v)) + plist = append(plist, toproto.PluginSpecifications(name, v)) } return &apiv1.ListPluginsResponse{Plugins: plist}, nil diff --git a/pkg/web/api/plugin_v1_test.go b/pkg/web/api/plugin_v1_test.go index 68bc4e892..300218c62 100644 --- a/pkg/web/api/plugin_v1_test.go +++ b/pkg/web/api/plugin_v1_test.go @@ -19,7 +19,7 @@ import ( "sort" "testing" - "github.com/conduitio/conduit/pkg/plugin" + connectorPlugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/web/api/mock" "github.com/conduitio/conduit/pkg/web/api/toproto" apiv1 "github.com/conduitio/conduit/proto/api/v1" @@ -27,40 +27,41 @@ import ( "go.uber.org/mock/gomock" ) +// Deprecated: testing the old plugin API. 
func TestPluginAPIv1_ListPluginByName(t *testing.T) { is := is.New(t) ctx := context.Background() ctrl := gomock.NewController(t) - psMock := mock.NewPluginOrchestrator(ctrl) - api := NewPluginAPIv1(psMock) + cpoMock := mock.NewConnectorPluginOrchestrator(ctrl) + api := NewPluginAPIv1(cpoMock) names := []string{"do-not-want-this-plugin", "want-p1", "want-p2", "skip", "another-skipped"} - plsMap := make(map[string]plugin.Specification) - pls := make([]plugin.Specification, 0) + plsMap := make(map[string]connectorPlugin.Specification) + pls := make([]connectorPlugin.Specification, 0) for _, name := range names { - ps := plugin.Specification{ + ps := connectorPlugin.Specification{ Name: name, Description: "desc", Version: "v1.0", Author: "Aaron", - SourceParams: map[string]plugin.Parameter{ + SourceParams: map[string]connectorPlugin.Parameter{ "param": { - Type: plugin.ParameterTypeString, - Validations: []plugin.Validation{{ - Type: plugin.ValidationTypeRequired, + Type: connectorPlugin.ParameterTypeString, + Validations: []connectorPlugin.Validation{{ + Type: connectorPlugin.ValidationTypeRequired, }}, }, }, - DestinationParams: map[string]plugin.Parameter{}, + DestinationParams: map[string]connectorPlugin.Parameter{}, } pls = append(pls, ps) plsMap[name] = ps } - psMock.EXPECT(). + cpoMock.EXPECT(). List(ctx). Return(plsMap, nil). 
Times(1) @@ -95,13 +96,13 @@ func TestPluginAPIv1_ListPluginByName(t *testing.T) { is.NoErr(err) + sortPlugins := func(p []*apiv1.PluginSpecifications) { + sort.Slice(p, func(i, j int) bool { + return p[i].Name < p[j].Name + }) + } + sortPlugins(want.Plugins) sortPlugins(got.Plugins) is.Equal(want, got) } - -func sortPlugins(p []*apiv1.PluginSpecifications) { - sort.Slice(p, func(i, j int) bool { - return p[i].Name < p[j].Name - }) -} diff --git a/pkg/web/api/processor_v1.go b/pkg/web/api/processor_v1.go index 4d8525731..7bac5563b 100644 --- a/pkg/web/api/processor_v1.go +++ b/pkg/web/api/processor_v1.go @@ -15,11 +15,16 @@ //go:generate mockgen -destination=mock/processor.go -package=mock -mock_names=ProcessorOrchestrator=ProcessorOrchestrator . ProcessorOrchestrator //go:generate mockgen -destination=mock/processor_service_in.go -package=mock -mock_names=ProcessorService_InspectProcessorInServer=ProcessorService_InspectProcessorInServer github.com/conduitio/conduit/proto/api/v1 ProcessorService_InspectProcessorInServer //go:generate mockgen -destination=mock/processor_service_out.go -package=mock -mock_names=ProcessorService_InspectProcessorOutServer=ProcessorService_InspectProcessorOutServer github.com/conduitio/conduit/proto/api/v1 ProcessorService_InspectProcessorOutServer +//go:generate mockgen -destination=mock/processor_plugin.go -package=mock -mock_names=ProcessorPluginOrchestrator=ProcessorPluginOrchestrator . ProcessorPluginOrchestrator package api import ( "context" + "regexp" + "slices" + + processorSdk "github.com/conduitio/conduit-processor-sdk" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/inspector" @@ -31,14 +36,14 @@ import ( "google.golang.org/grpc" ) -// ProcessorOrchestrator defines a CRUD interface that manages the Interface resource. +// ProcessorOrchestrator defines a CRUD interface that manages the processor resource. 
type ProcessorOrchestrator interface { List(ctx context.Context) map[string]*processor.Instance - // Get will return a single Interface or an error if it doesn't exist. + // Get will return a single processor or an error if it doesn't exist. Get(ctx context.Context, id string) (*processor.Instance, error) - // Create will make a new Interface. + // Create will make a new processor. Create(ctx context.Context, procType string, parent processor.Parent, cfg processor.Config, condition string) (*processor.Instance, error) - // Update will update a Interface's config. + // Update will update a processor's config. Update(ctx context.Context, id string, cfg processor.Config) (*processor.Instance, error) // Delete removes a processor Delete(ctx context.Context, id string) error @@ -48,14 +53,25 @@ type ProcessorOrchestrator interface { InspectOut(ctx context.Context, id string) (*inspector.Session, error) } +type ProcessorPluginOrchestrator interface { + // List will return all processor plugins' specs. + List(ctx context.Context) (map[string]processorSdk.Specification, error) +} + type ProcessorAPIv1 struct { apiv1.UnimplementedProcessorServiceServer - ps ProcessorOrchestrator + processorOrchestrator ProcessorOrchestrator + processorPluginOrchestrator ProcessorPluginOrchestrator } // NewProcessorAPIv1 returns a new processor API server. -func NewProcessorAPIv1(ps ProcessorOrchestrator) *ProcessorAPIv1 { - return &ProcessorAPIv1{ps: ps} +func NewProcessorAPIv1( + po ProcessorOrchestrator, + ppo ProcessorPluginOrchestrator) *ProcessorAPIv1 { + return &ProcessorAPIv1{ + processorOrchestrator: po, + processorPluginOrchestrator: ppo, + } } // Register registers the service in the server. 
@@ -67,11 +83,11 @@ func (p *ProcessorAPIv1) ListProcessors( ctx context.Context, req *apiv1.ListProcessorsRequest, ) (*apiv1.ListProcessorsResponse, error) { - list := p.ps.List(ctx) + list := p.processorOrchestrator.List(ctx) var plist []*apiv1.Processor for _, v := range list { - if len(req.ParentIds) == 0 || p.containsString(req.ParentIds, v.Parent.ID) { + if len(req.ParentIds) == 0 || slices.Contains(req.ParentIds, v.Parent.ID) { plist = append(plist, toproto.Processor(v)) } } @@ -87,7 +103,7 @@ func (p *ProcessorAPIv1) InspectProcessorIn( return status.ProcessorError(cerrors.ErrEmptyID) } - session, err := p.ps.InspectIn(server.Context(), req.Id) + session, err := p.processorOrchestrator.InspectIn(server.Context(), req.Id) if err != nil { return status.ProcessorError(cerrors.Errorf("failed to inspect processor: %w", err)) } @@ -117,7 +133,7 @@ func (p *ProcessorAPIv1) InspectProcessorOut( return status.ProcessorError(cerrors.ErrEmptyID) } - session, err := p.ps.InspectOut(server.Context(), req.Id) + session, err := p.processorOrchestrator.InspectOut(server.Context(), req.Id) if err != nil { return status.ProcessorError(cerrors.Errorf("failed to inspect processor: %w", err)) } @@ -149,7 +165,7 @@ func (p *ProcessorAPIv1) GetProcessor( } // fetch the processor from the ProcessorOrchestrator - pr, err := p.ps.Get(ctx, req.Id) + pr, err := p.processorOrchestrator.Get(ctx, req.Id) if err != nil { return nil, status.ProcessorError(cerrors.Errorf("failed to get processor by ID: %w", err)) } @@ -165,9 +181,19 @@ func (p *ProcessorAPIv1) CreateProcessor( ctx context.Context, req *apiv1.CreateProcessorRequest, ) (*apiv1.CreateProcessorResponse, error) { - created, err := p.ps.Create( + //nolint:staticcheck // we're fine with allowing Type for some time more + if req.Type != "" && req.Plugin != "" { + return nil, status.ProcessorError(cerrors.New("only one of [type, plugin] can be specified")) + } + plugin := req.Plugin + if plugin == "" { + //nolint:staticcheck // 
we're fine with allowing Type for some time more + plugin = req.Type + } + + created, err := p.processorOrchestrator.Create( ctx, - req.Type, + plugin, fromproto.ProcessorParent(req.Parent), fromproto.ProcessorConfig(req.Config), req.Condition, @@ -190,7 +216,7 @@ func (p *ProcessorAPIv1) UpdateProcessor( return nil, cerrors.ErrEmptyID } - updated, err := p.ps.Update(ctx, req.Id, fromproto.ProcessorConfig(req.Config)) + updated, err := p.processorOrchestrator.Update(ctx, req.Id, fromproto.ProcessorConfig(req.Config)) if err != nil { return nil, status.ProcessorError(cerrors.Errorf("failed to update processor: %w", err)) @@ -202,7 +228,7 @@ func (p *ProcessorAPIv1) UpdateProcessor( } func (p *ProcessorAPIv1) DeleteProcessor(ctx context.Context, req *apiv1.DeleteProcessorRequest) (*apiv1.DeleteProcessorResponse, error) { - err := p.ps.Delete(ctx, req.Id) + err := p.processorOrchestrator.Delete(ctx, req.Id) if err != nil { return nil, status.ProcessorError(cerrors.Errorf("failed to delete processor: %w", err)) @@ -211,11 +237,31 @@ func (p *ProcessorAPIv1) DeleteProcessor(ctx context.Context, req *apiv1.DeleteP return &apiv1.DeleteProcessorResponse{}, nil } -func (p *ProcessorAPIv1) containsString(a []string, s string) bool { - for _, v := range a { - if v == s { - return true +func (p *ProcessorAPIv1) ListProcessorPlugins( + ctx context.Context, + req *apiv1.ListProcessorPluginsRequest, +) (*apiv1.ListProcessorPluginsResponse, error) { + var nameFilter *regexp.Regexp + if req.GetName() != "" { + var err error + nameFilter, err = regexp.Compile("^" + req.GetName() + "$") + if err != nil { + return nil, status.PluginError(cerrors.New("invalid name regex")) } } - return false + + mp, err := p.processorPluginOrchestrator.List(ctx) + if err != nil { + return nil, status.PluginError(err) + } + var plist []*apiv1.ProcessorPluginSpecifications + + for name, v := range mp { + if nameFilter != nil && !nameFilter.MatchString(name) { + continue // don't add to result list, 
filter didn't match + } + plist = append(plist, toproto.ProcessorPluginSpecifications(name, v)) + } + + return &apiv1.ListProcessorPluginsResponse{Plugins: plist}, nil } diff --git a/pkg/web/api/processor_v1_test.go b/pkg/web/api/processor_v1_test.go index 927da6a55..3280bbec4 100644 --- a/pkg/web/api/processor_v1_test.go +++ b/pkg/web/api/processor_v1_test.go @@ -20,12 +20,14 @@ import ( "testing" "time" + "github.com/conduitio/conduit-commons/config" + processorSdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/foundation/cchan" "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/foundation/log" "github.com/conduitio/conduit/pkg/inspector" "github.com/conduitio/conduit/pkg/processor" - procmock "github.com/conduitio/conduit/pkg/processor/mock" apimock "github.com/conduitio/conduit/pkg/web/api/mock" "github.com/conduitio/conduit/pkg/web/api/toproto" apiv1 "github.com/conduitio/conduit/proto/api/v1" @@ -41,8 +43,7 @@ func TestProcessorAPIv1_ListProcessors(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) psMock := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(psMock) - p := procmock.NewProcessor(ctrl) + api := NewProcessorAPIv1(psMock, nil) config := processor.Config{ Settings: map[string]string{"titan": "armored"}, @@ -51,26 +52,24 @@ func TestProcessorAPIv1_ListProcessors(t *testing.T) { now := time.Now() prs := []*processor.Instance{ { - ID: uuid.NewString(), - Type: "Pants", + ID: uuid.NewString(), + Plugin: "Pants", Parent: processor.Parent{ ID: uuid.NewString(), Type: processor.ParentTypeConnector, }, Config: config, - Processor: p, UpdatedAt: now, CreatedAt: now, }, { - ID: uuid.NewString(), - Type: "Pants Too", + ID: uuid.NewString(), + Plugin: "Pants Too", Parent: processor.Parent{ ID: uuid.NewString(), Type: processor.ParentTypeConnector, }, Config: config, - Processor: p, UpdatedAt: now, CreatedAt: now, }, @@ -83,8 +82,8 @@ func 
TestProcessorAPIv1_ListProcessors(t *testing.T) { want := &apiv1.ListProcessorsResponse{Processors: []*apiv1.Processor{ { - Id: prs[0].ID, - Type: prs[0].Type, + Id: prs[0].ID, + Plugin: prs[0].Plugin, Config: &apiv1.Processor_Config{ Settings: prs[0].Config.Settings, }, @@ -97,8 +96,8 @@ func TestProcessorAPIv1_ListProcessors(t *testing.T) { }, { - Id: prs[1].ID, - Type: prs[1].Type, + Id: prs[1].ID, + Plugin: prs[1].Plugin, Config: &apiv1.Processor_Config{ Settings: prs[1].Config.Settings, }, @@ -126,8 +125,7 @@ func TestProcessorAPIv1_ListProcessorsByParents(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) psMock := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(psMock) - p := procmock.NewProcessor(ctrl) + api := NewProcessorAPIv1(psMock, nil) config := processor.Config{ Settings: map[string]string{"titan": "armored"}, @@ -137,50 +135,46 @@ func TestProcessorAPIv1_ListProcessorsByParents(t *testing.T) { sharedParent := uuid.NewString() prs := []*processor.Instance{ { - ID: uuid.NewString(), - Type: "Pants", + ID: uuid.NewString(), + Plugin: "Pants", Parent: processor.Parent{ ID: sharedParent, Type: processor.ParentTypeConnector, }, Config: config, - Processor: p, UpdatedAt: now, CreatedAt: now, }, { - ID: uuid.NewString(), - Type: "Pants Too", + ID: uuid.NewString(), + Plugin: "Pants Too", Parent: processor.Parent{ ID: uuid.NewString(), Type: processor.ParentTypeConnector, }, Config: config, - Processor: p, UpdatedAt: now, CreatedAt: now, }, { - ID: uuid.NewString(), - Type: "Pants Thrice", + ID: uuid.NewString(), + Plugin: "Pants Thrice", Parent: processor.Parent{ ID: uuid.NewString(), Type: processor.ParentTypePipeline, }, Config: processor.Config{}, - Processor: p, UpdatedAt: now, CreatedAt: now, }, { - ID: uuid.NewString(), - Type: "Shorts", + ID: uuid.NewString(), + Plugin: "Shorts", Parent: processor.Parent{ ID: sharedParent, Type: processor.ParentTypePipeline, }, Config: processor.Config{}, - Processor: p, 
UpdatedAt: now, CreatedAt: now, }, @@ -195,8 +189,8 @@ func TestProcessorAPIv1_ListProcessorsByParents(t *testing.T) { want := &apiv1.ListProcessorsResponse{Processors: []*apiv1.Processor{ { - Id: prs[0].ID, - Type: prs[0].Type, + Id: prs[0].ID, + Plugin: prs[0].Plugin, Config: &apiv1.Processor_Config{ Settings: prs[0].Config.Settings, }, @@ -209,8 +203,8 @@ func TestProcessorAPIv1_ListProcessorsByParents(t *testing.T) { }, { - Id: prs[2].ID, - Type: prs[2].Type, + Id: prs[2].ID, + Plugin: prs[2].Plugin, Config: &apiv1.Processor_Config{ Settings: prs[2].Config.Settings, }, @@ -222,8 +216,8 @@ func TestProcessorAPIv1_ListProcessorsByParents(t *testing.T) { UpdatedAt: timestamppb.New(prs[1].UpdatedAt), }, { - Id: prs[3].ID, - Type: prs[3].Type, + Id: prs[3].ID, + Plugin: prs[3].Plugin, Config: &apiv1.Processor_Config{ Settings: prs[3].Config.Settings, }, @@ -251,8 +245,7 @@ func TestProcessorAPIv1_CreateProcessor(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) psMock := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(psMock) - p := procmock.NewProcessor(ctrl) + api := NewProcessorAPIv1(psMock, nil) config := processor.Config{ Settings: map[string]string{"titan": "armored"}, @@ -260,23 +253,22 @@ func TestProcessorAPIv1_CreateProcessor(t *testing.T) { now := time.Now() pr := &processor.Instance{ - ID: uuid.NewString(), - Type: "Pants", + ID: uuid.NewString(), + Plugin: "Pants", Parent: processor.Parent{ ID: uuid.NewString(), Type: processor.ParentTypeConnector, }, Config: config, - Processor: p, Condition: "{{ true }}", UpdatedAt: now, CreatedAt: now, } - psMock.EXPECT().Create(ctx, pr.Type, pr.Parent, config, pr.Condition).Return(pr, nil).Times(1) + psMock.EXPECT().Create(ctx, pr.Plugin, pr.Parent, config, pr.Condition).Return(pr, nil).Times(1) want := &apiv1.CreateProcessorResponse{Processor: &apiv1.Processor{ - Id: pr.ID, - Type: pr.Type, + Id: pr.ID, + Plugin: pr.Plugin, Config: &apiv1.Processor_Config{ Settings: 
pr.Config.Settings, }, @@ -292,7 +284,7 @@ func TestProcessorAPIv1_CreateProcessor(t *testing.T) { got, err := api.CreateProcessor( ctx, &apiv1.CreateProcessorRequest{ - Type: want.Processor.Type, + Plugin: want.Processor.Plugin, Parent: want.Processor.Parent, Config: want.Processor.Config, Condition: want.Processor.Condition, @@ -309,13 +301,12 @@ func TestProcessorAPIv1_GetProcessor(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) psMock := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(psMock) - p := procmock.NewProcessor(ctrl) + api := NewProcessorAPIv1(psMock, nil) now := time.Now() pr := &processor.Instance{ - ID: uuid.NewString(), - Type: "Pants", + ID: uuid.NewString(), + Plugin: "Pants", Parent: processor.Parent{ ID: uuid.NewString(), Type: processor.ParentTypeConnector, @@ -323,7 +314,6 @@ func TestProcessorAPIv1_GetProcessor(t *testing.T) { Config: processor.Config{ Settings: map[string]string{"titan": "armored"}, }, - Processor: p, CreatedAt: now, UpdatedAt: now, } @@ -331,8 +321,8 @@ func TestProcessorAPIv1_GetProcessor(t *testing.T) { psMock.EXPECT().Get(ctx, pr.ID).Return(pr, nil).Times(1) want := &apiv1.GetProcessorResponse{Processor: &apiv1.Processor{ - Id: pr.ID, - Type: pr.Type, + Id: pr.ID, + Plugin: pr.Plugin, Config: &apiv1.Processor_Config{ Settings: pr.Config.Settings, }, @@ -361,8 +351,7 @@ func TestProcessorAPIv1_UpdateProcessor(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) psMock := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(psMock) - p := procmock.NewProcessor(ctrl) + api := NewProcessorAPIv1(psMock, nil) config := processor.Config{ Settings: map[string]string{"titan": "armored"}, @@ -370,22 +359,21 @@ func TestProcessorAPIv1_UpdateProcessor(t *testing.T) { now := time.Now() pr := &processor.Instance{ - ID: uuid.NewString(), - Type: "Pants", + ID: uuid.NewString(), + Plugin: "Pants", Parent: processor.Parent{ ID: uuid.NewString(), Type: 
processor.ParentTypeConnector, }, Config: config, - Processor: p, UpdatedAt: now, CreatedAt: now, } psMock.EXPECT().Update(ctx, pr.ID, config).Return(pr, nil).Times(1) want := &apiv1.UpdateProcessorResponse{Processor: &apiv1.Processor{ - Id: pr.ID, - Type: pr.Type, + Id: pr.ID, + Plugin: pr.Plugin, Config: &apiv1.Processor_Config{ Settings: pr.Config.Settings, }, @@ -415,7 +403,7 @@ func TestProcessorAPIv1_DeleteProcessor(t *testing.T) { ctx := context.Background() ctrl := gomock.NewController(t) psMock := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(psMock) + api := NewProcessorAPIv1(psMock, nil) id := uuid.NewString() @@ -441,7 +429,7 @@ func TestProcessorAPIv1_InspectIn_SendRecord(t *testing.T) { defer cancel() ctrl := gomock.NewController(t) orchestrator := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(orchestrator) + api := NewProcessorAPIv1(orchestrator, nil) id := uuid.NewString() rec := generateTestRecord() @@ -478,7 +466,7 @@ func TestProcessorAPIv1_InspectIn_SendErr(t *testing.T) { defer cancel() ctrl := gomock.NewController(t) orchestrator := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(orchestrator) + api := NewProcessorAPIv1(orchestrator, nil) id := uuid.NewString() ins := inspector.New(log.Nop(), 10) @@ -517,7 +505,7 @@ func TestProcessorAPIv1_InspectIn_Err(t *testing.T) { defer cancel() ctrl := gomock.NewController(t) orchestrator := apimock.NewProcessorOrchestrator(ctrl) - api := NewProcessorAPIv1(orchestrator) + api := NewProcessorAPIv1(orchestrator, nil) id := uuid.NewString() err := cerrors.New("not found, sorry") @@ -540,6 +528,68 @@ func TestProcessorAPIv1_InspectIn_Err(t *testing.T) { ) } +func TestProcessorAPIv1_ListProcessorPluginsByName(t *testing.T) { + is := is.New(t) + + ctx := context.Background() + ctrl := gomock.NewController(t) + ppoMock := apimock.NewProcessorPluginOrchestrator(ctrl) + api := NewProcessorAPIv1(nil, ppoMock) + + names := []string{"do-not-want-this-plugin", 
"want-p1", "want-p2", "skip", "another-skipped"} + + plsMap := make(map[string]processorSdk.Specification) + pls := make([]processorSdk.Specification, 0) + + for _, name := range names { + ps := processorSdk.Specification{ + Name: name, + Description: "desc", + Version: "v1.0", + Author: "Aaron", + Parameters: map[string]config.Parameter{ + "param": { + Type: config.ParameterTypeString, + Validations: []config.Validation{ + config.ValidationRequired{}, + }, + }, + }, + } + pls = append(pls, ps) + plsMap[name] = ps + } + + ppoMock.EXPECT(). + List(ctx). + Return(plsMap, nil). + Times(1) + + want := &apiv1.ListProcessorPluginsResponse{ + Plugins: []*apiv1.ProcessorPluginSpecifications{ + toproto.ProcessorPluginSpecifications(pls[1].Name, pls[1]), + toproto.ProcessorPluginSpecifications(pls[2].Name, pls[2]), + }, + } + + got, err := api.ListProcessorPlugins( + ctx, + &apiv1.ListProcessorPluginsRequest{Name: "want-.*"}, + ) + + is.NoErr(err) + + sortPlugins := func(p []*apiv1.ProcessorPluginSpecifications) { + sort.Slice(p, func(i, j int) bool { + return p[i].Name < p[j].Name + }) + } + + sortPlugins(want.Plugins) + sortPlugins(got.Plugins) + is.Equal(want, got) +} + func sortProcessors(c []*apiv1.Processor) { sort.Slice(c, func(i, j int) bool { return c[i].Id < c[j].Id diff --git a/pkg/web/api/status/status.go b/pkg/web/api/status/status.go index 2328ef2fa..3cbde0377 100644 --- a/pkg/web/api/status/status.go +++ b/pkg/web/api/status/status.go @@ -19,7 +19,7 @@ import ( "github.com/conduitio/conduit/pkg/foundation/cerrors" "github.com/conduitio/conduit/pkg/orchestrator" "github.com/conduitio/conduit/pkg/pipeline" - "github.com/conduitio/conduit/pkg/plugin" + conn_plugin "github.com/conduitio/conduit/pkg/plugin/connector" "github.com/conduitio/conduit/pkg/processor" "google.golang.org/grpc/codes" grpcstatus "google.golang.org/grpc/status" @@ -88,7 +88,7 @@ func codeFromError(err error) codes.Code { return codes.AlreadyExists case cerrors.Is(err, 
connector.ErrConnectorRunning): return codes.FailedPrecondition - case cerrors.Is(err, &plugin.ValidationError{}): + case cerrors.Is(err, &conn_plugin.ValidationError{}): return codes.FailedPrecondition case cerrors.Is(err, orchestrator.ErrPipelineHasConnectorsAttached): return codes.FailedPrecondition diff --git a/pkg/web/api/toproto/plugin.go b/pkg/web/api/toproto/plugin.go index 2dc7c1092..787939929 100644 --- a/pkg/web/api/toproto/plugin.go +++ b/pkg/web/api/toproto/plugin.go @@ -15,29 +15,32 @@ package toproto import ( - "github.com/conduitio/conduit/pkg/plugin" + configv1 "github.com/conduitio/conduit-commons/proto/config/v1" + processorSdk "github.com/conduitio/conduit-processor-sdk" + "github.com/conduitio/conduit/pkg/plugin/connector" apiv1 "github.com/conduitio/conduit/proto/api/v1" ) func _() { // An "invalid array index" compiler error signifies that the constant values have changed. var vTypes [1]struct{} - _ = vTypes[int(plugin.ValidationTypeRequired)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_REQUIRED)] - _ = vTypes[int(plugin.ValidationTypeGreaterThan)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_GREATER_THAN)] - _ = vTypes[int(plugin.ValidationTypeLessThan)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_LESS_THAN)] - _ = vTypes[int(plugin.ValidationTypeInclusion)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_INCLUSION)] - _ = vTypes[int(plugin.ValidationTypeExclusion)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_EXCLUSION)] - _ = vTypes[int(plugin.ValidationTypeRegex)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_REGEX)] + _ = vTypes[int(connector.ValidationTypeRequired)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_REQUIRED)] + _ = vTypes[int(connector.ValidationTypeGreaterThan)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_GREATER_THAN)] + _ = vTypes[int(connector.ValidationTypeLessThan)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_LESS_THAN)] 
+ _ = vTypes[int(connector.ValidationTypeInclusion)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_INCLUSION)] + _ = vTypes[int(connector.ValidationTypeExclusion)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_EXCLUSION)] + _ = vTypes[int(connector.ValidationTypeRegex)-int(apiv1.PluginSpecifications_Parameter_Validation_TYPE_REGEX)] - _ = vTypes[int(plugin.ParameterTypeString)-int(apiv1.PluginSpecifications_Parameter_TYPE_STRING)] - _ = vTypes[int(plugin.ParameterTypeInt)-int(apiv1.PluginSpecifications_Parameter_TYPE_INT)] - _ = vTypes[int(plugin.ParameterTypeFloat)-int(apiv1.PluginSpecifications_Parameter_TYPE_FLOAT)] - _ = vTypes[int(plugin.ParameterTypeFile)-int(apiv1.PluginSpecifications_Parameter_TYPE_FILE)] - _ = vTypes[int(plugin.ParameterTypeBool)-int(apiv1.PluginSpecifications_Parameter_TYPE_BOOL)] - _ = vTypes[int(plugin.ParameterTypeDuration)-int(apiv1.PluginSpecifications_Parameter_TYPE_DURATION)] + _ = vTypes[int(connector.ParameterTypeString)-int(apiv1.PluginSpecifications_Parameter_TYPE_STRING)] + _ = vTypes[int(connector.ParameterTypeInt)-int(apiv1.PluginSpecifications_Parameter_TYPE_INT)] + _ = vTypes[int(connector.ParameterTypeFloat)-int(apiv1.PluginSpecifications_Parameter_TYPE_FLOAT)] + _ = vTypes[int(connector.ParameterTypeFile)-int(apiv1.PluginSpecifications_Parameter_TYPE_FILE)] + _ = vTypes[int(connector.ParameterTypeBool)-int(apiv1.PluginSpecifications_Parameter_TYPE_BOOL)] + _ = vTypes[int(connector.ParameterTypeDuration)-int(apiv1.PluginSpecifications_Parameter_TYPE_DURATION)] } -func Plugin(name string, in plugin.Specification) *apiv1.PluginSpecifications { +// Deprecated: this is here for backwards compatibility with the old plugin API. 
+func PluginSpecifications(name string, in connector.Specification) *apiv1.PluginSpecifications { return &apiv1.PluginSpecifications{ Name: name, Summary: in.Summary, @@ -49,7 +52,8 @@ func Plugin(name string, in plugin.Specification) *apiv1.PluginSpecifications { } } -func PluginParamsMap(in map[string]plugin.Parameter) map[string]*apiv1.PluginSpecifications_Parameter { +// Deprecated: this is here for backwards compatibility with the old plugin API. +func PluginParamsMap(in map[string]connector.Parameter) map[string]*apiv1.PluginSpecifications_Parameter { out := make(map[string]*apiv1.PluginSpecifications_Parameter) for k, v := range in { out[k] = &apiv1.PluginSpecifications_Parameter{ @@ -62,18 +66,63 @@ func PluginParamsMap(in map[string]plugin.Parameter) map[string]*apiv1.PluginSpe return out } -func PluginParamValidations(in []plugin.Validation) []*apiv1.PluginSpecifications_Parameter_Validation { +// Deprecated: this is here for backwards compatibility with the old plugin API. +func PluginParamValidations(in []connector.Validation) []*apiv1.PluginSpecifications_Parameter_Validation { // we need an empty slice here so that the returned JSON would be "validations":[] instead of "validations":null out := make([]*apiv1.PluginSpecifications_Parameter_Validation, 0) for _, v := range in { out = append(out, &apiv1.PluginSpecifications_Parameter_Validation{ - Type: ValidationType(v.Type), + Type: apiv1.PluginSpecifications_Parameter_Validation_Type(v.Type), Value: v.Value, }) } return out } -func ValidationType(in plugin.ValidationType) apiv1.PluginSpecifications_Parameter_Validation_Type { - return apiv1.PluginSpecifications_Parameter_Validation_Type(in) +func ConnectorPluginSpecifications(name string, in connector.Specification) *apiv1.ConnectorPluginSpecifications { + return &apiv1.ConnectorPluginSpecifications{ + Name: name, + Summary: in.Summary, + Description: in.Description, + Version: in.Version, + DestinationParams: 
ConnectorPluginParamsMap(in.DestinationParams), + SourceParams: ConnectorPluginParamsMap(in.SourceParams), + } +} + +func ConnectorPluginParamsMap(in map[string]connector.Parameter) map[string]*configv1.Parameter { + out := make(map[string]*configv1.Parameter) + for k, v := range in { + out[k] = &configv1.Parameter{ + Description: v.Description, + Default: v.Default, + Type: configv1.Parameter_Type(v.Type), + Validations: ConnectorPluginParamValidations(v.Validations), + } + } + return out +} + +func ConnectorPluginParamValidations(in []connector.Validation) []*configv1.Validation { + // we need an empty slice here so that the returned JSON would be "validations":[] instead of "validations":null + out := make([]*configv1.Validation, 0) + for _, v := range in { + out = append(out, &configv1.Validation{ + Type: configv1.Validation_Type(v.Type), + Value: v.Value, + }) + } + return out +} + +func ProcessorPluginSpecifications(name string, in processorSdk.Specification) *apiv1.ProcessorPluginSpecifications { + params := make(map[string]*configv1.Parameter) + in.Parameters.ToProto(params) + return &apiv1.ProcessorPluginSpecifications{ + Name: name, + Summary: in.Summary, + Description: in.Description, + Version: in.Version, + Parameters: params, + } } diff --git a/pkg/web/api/toproto/processor.go b/pkg/web/api/toproto/processor.go index 724eb06f7..0f40d20c7 100644 --- a/pkg/web/api/toproto/processor.go +++ b/pkg/web/api/toproto/processor.go @@ -30,7 +30,7 @@ func _() { func Processor(in *processor.Instance) *apiv1.Processor { return &apiv1.Processor{ Id: in.ID, - Type: in.Type, + Plugin: in.Plugin, CreatedAt: timestamppb.New(in.CreatedAt), UpdatedAt: timestamppb.New(in.UpdatedAt), Config: ProcessorConfig(in.Config), diff --git a/pkg/web/openapi/swagger-ui/api/v1/api.swagger.json b/pkg/web/openapi/swagger-ui/api/v1/api.swagger.json index 1e7f86e8b..d5a3c4ee7 100644 --- a/pkg/web/openapi/swagger-ui/api/v1/api.swagger.json +++ 
b/pkg/web/openapi/swagger-ui/api/v1/api.swagger.json @@ -156,6 +156,48 @@ ] } }, + "/v1/connectors/plugins": { + "get": { + "operationId": "ConnectorService_ListConnectorPlugins", + "responses": { + "200": { + "description": "", + "schema": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v1ConnectorPluginSpecifications" + } + } + }, + "500": { + "description": "", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + }, + "examples": { + "application/json": { + "code": 13, + "message": "server error", + "details": [] + } + } + } + }, + "parameters": [ + { + "name": "name", + "description": "Regex to filter plugins by name.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "ConnectorService" + ] + } + }, "/v1/connectors/validate": { "post": { "operationId": "ConnectorService_ValidateConnector", @@ -1036,6 +1078,7 @@ }, "/v1/plugins": { "get": { + "summary": "Deprecated: use ConnectorService.ListConnectorPlugins instead.", "operationId": "PluginService_ListPlugins", "responses": { "200": { @@ -1171,6 +1214,48 @@ ] } }, + "/v1/processors/plugins": { + "get": { + "operationId": "ProcessorService_ListProcessorPlugins", + "responses": { + "200": { + "description": "", + "schema": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v1ProcessorPluginSpecifications" + } + } + }, + "500": { + "description": "", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + }, + "examples": { + "application/json": { + "code": 13, + "message": "server error", + "details": [] + } + } + } + }, + "parameters": [ + { + "name": "name", + "description": "Regex to filter plugins by name.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "ProcessorService" + ] + } + }, "/v1/processors/{id}": { "get": { "operationId": "ProcessorService_GetProcessor", @@ -1467,33 +1552,6 @@ } } }, - "ParameterValidation": { - "type": "object", - "properties": { - "type": { - "$ref": 
"#/definitions/ParameterValidationType" - }, - "value": { - "type": "string", - "description": "The value to be compared with the parameter,\nor a comma separated list in case of Validation.TYPE_INCLUSION or Validation.TYPE_EXCLUSION." - } - }, - "description": "Validation to be made on the parameter." - }, - "ParameterValidationType": { - "type": "string", - "enum": [ - "TYPE_UNSPECIFIED", - "TYPE_REQUIRED", - "TYPE_GREATER_THAN", - "TYPE_LESS_THAN", - "TYPE_INCLUSION", - "TYPE_EXCLUSION", - "TYPE_REGEX" - ], - "default": "TYPE_UNSPECIFIED", - "description": " - TYPE_REQUIRED: Parameter must be present.\n - TYPE_GREATER_THAN: Parameter must be greater than {value}.\n - TYPE_LESS_THAN: Parameter must be less than {value}.\n - TYPE_INCLUSION: Parameter must be included in the comma separated list {value}.\n - TYPE_EXCLUSION: Parameter must not be included in the comma separated list {value}.\n - TYPE_REGEX: Parameter must match the regex {value}." - }, "PipelineDLQ": { "type": "object", "properties": { @@ -1532,40 +1590,32 @@ } } }, - "PluginSpecificationsParameter": { + "PluginSpecificationsParameterValidation": { "type": "object", "properties": { - "description": { - "type": "string" - }, - "default": { - "type": "string" - }, "type": { - "$ref": "#/definitions/PluginSpecificationsParameterType" + "$ref": "#/definitions/PluginSpecificationsParameterValidationType" }, - "validations": { - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/ParameterValidation" - } + "value": { + "type": "string", + "description": "The value to be compared with the parameter,\nor a comma separated list in case of Validation.TYPE_INCLUSION or Validation.TYPE_EXCLUSION." } - } + }, + "description": "Deprecated: use config.v1.Validation instead." 
}, - "PluginSpecificationsParameterType": { + "PluginSpecificationsParameterValidationType": { "type": "string", "enum": [ "TYPE_UNSPECIFIED", - "TYPE_STRING", - "TYPE_INT", - "TYPE_FLOAT", - "TYPE_BOOL", - "TYPE_FILE", - "TYPE_DURATION" + "TYPE_REQUIRED", + "TYPE_GREATER_THAN", + "TYPE_LESS_THAN", + "TYPE_INCLUSION", + "TYPE_EXCLUSION", + "TYPE_REGEX" ], "default": "TYPE_UNSPECIFIED", - "description": "Type shows the parameter type.\n\n - TYPE_STRING: Parameter is a string.\n - TYPE_INT: Parameter is an integer.\n - TYPE_FLOAT: Parameter is a float.\n - TYPE_BOOL: Parameter is a boolean.\n - TYPE_FILE: Parameter is a file.\n - TYPE_DURATION: Parameter is a duration." + "description": "Deprecated: use config.v1.Validation.Type instead.\n\n - TYPE_REQUIRED: Parameter must be present.\n - TYPE_GREATER_THAN: Parameter must be greater than {value}.\n - TYPE_LESS_THAN: Parameter must be less than {value}.\n - TYPE_INCLUSION: Parameter must be included in the comma separated list {value}.\n - TYPE_EXCLUSION: Parameter must not be included in the comma separated list {value}.\n - TYPE_REGEX: Parameter must match the regex {value}." }, "ProcessorParent": { "type": "object", @@ -1602,6 +1652,73 @@ } } }, + "configv1Parameter": { + "type": "object", + "properties": { + "default": { + "type": "string", + "description": "Default is the default value of the parameter. If there is no default\nvalue use an empty string." + }, + "description": { + "type": "string", + "description": "Description explains what the parameter does and how to configure it." + }, + "type": { + "$ref": "#/definitions/configv1ParameterType", + "description": "Type defines the parameter data type." + }, + "validations": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/configv1Validation" + }, + "description": "Validations are validations to be made on the parameter." + } + }, + "description": "Parameter describes a single config parameter." 
+ }, + "configv1ParameterType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "TYPE_STRING", + "TYPE_INT", + "TYPE_FLOAT", + "TYPE_BOOL", + "TYPE_FILE", + "TYPE_DURATION" + ], + "default": "TYPE_UNSPECIFIED", + "description": "Type shows the parameter type.\n\n - TYPE_STRING: Parameter is a string.\n - TYPE_INT: Parameter is an integer.\n - TYPE_FLOAT: Parameter is a float.\n - TYPE_BOOL: Parameter is a boolean.\n - TYPE_FILE: Parameter is a file.\n - TYPE_DURATION: Parameter is a duration." + }, + "configv1Validation": { + "type": "object", + "properties": { + "type": { + "$ref": "#/definitions/configv1ValidationType" + }, + "value": { + "type": "string", + "description": "The value to be compared with the parameter,\nor a comma separated list in case of Validation.TYPE_INCLUSION or Validation.TYPE_EXCLUSION." + } + }, + "description": "Validation to be made on the parameter." + }, + "configv1ValidationType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "TYPE_REQUIRED", + "TYPE_GREATER_THAN", + "TYPE_LESS_THAN", + "TYPE_INCLUSION", + "TYPE_EXCLUSION", + "TYPE_REGEX" + ], + "default": "TYPE_UNSPECIFIED", + "description": " - TYPE_REQUIRED: Parameter must be present.\n - TYPE_GREATER_THAN: Parameter must be greater than {value}.\n - TYPE_LESS_THAN: Parameter must be less than {value}.\n - TYPE_INCLUSION: Parameter must be included in the comma separated list {value}.\n - TYPE_EXCLUSION: Parameter must not be included in the comma separated list {value}.\n - TYPE_REGEX: Parameter must match the regex {value}." + }, "googlerpcStatus": { "type": "object", "properties": { @@ -1720,6 +1837,46 @@ } } }, + "v1ConnectorPluginSpecifications": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name is the name of the plugin." + }, + "summary": { + "type": "string", + "description": "Summary is a brief description of the plugin and what it does,\nideally not longer than one sentence." 
+ }, + "description": { + "type": "string", + "description": "Description is a longer form field, appropriate for README-like\ntext that the author can provide for documentation about the\nusage of the plugin." + }, + "version": { + "type": "string", + "description": "Version string. Should follow semantic versioning and use the \"v\"\nprefix (e.g. v1.23.4)." + }, + "author": { + "type": "string", + "description": "Author declares the entity that created or maintains this plugin." + }, + "destinationParams": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/configv1Parameter" + }, + "description": "A map that describes parameters available for configuring the\ndestination plugin." + }, + "sourceParams": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/configv1Parameter" + }, + "description": "A map that describes parameters available for configuring the\nsource plugin." + } + }, + "description": "ConnectorPluginSpecifications describes the specifications of a connector plugin." + }, "v1ConnectorType": { "type": "string", "enum": [ @@ -1738,7 +1895,7 @@ }, "plugin": { "type": "string", - "description": "Plugin name is the name of the builtin plugin, or the absolute path of a standalone plugin." + "title": "Used to reference a plugin. 
Its format is as follows:\n[PLUGIN-TYPE:]PLUGIN-NAME[@VERSION]\nPLUGIN-TYPE: One of: builtin, standalone or any (default).\nPLUGIN-NAME: The name of the plugin as specified in the plugin specifications.\nVERSION: The plugin version as specified in the plugin specifications or latest (default).\nFor more information, see: https://conduit.io/docs/connectors/referencing/" }, "pipelineId": { "type": "string", @@ -1787,6 +1944,9 @@ }, "condition": { "type": "string" + }, + "plugin": { + "type": "string" } } }, @@ -1902,6 +2062,18 @@ } } }, + "v1ListConnectorPluginsResponse": { + "type": "object", + "properties": { + "plugins": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v1ConnectorPluginSpecifications" + } + } + } + }, "v1ListConnectorsResponse": { "type": "object", "properties": { @@ -1936,6 +2108,19 @@ "$ref": "#/definitions/v1PluginSpecifications" } } + }, + "description": "Deprecated: use ConnectorService.ListConnectorPlugins instead." + }, + "v1ListProcessorPluginsResponse": { + "type": "object", + "properties": { + "plugins": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v1ProcessorPluginSpecifications" + } + } } }, "v1ListProcessorsResponse": { @@ -2030,16 +2215,53 @@ "destinationParams": { "type": "object", "additionalProperties": { - "$ref": "#/definitions/PluginSpecificationsParameter" + "$ref": "#/definitions/v1PluginSpecificationsParameter" } }, "sourceParams": { "type": "object", "additionalProperties": { - "$ref": "#/definitions/PluginSpecificationsParameter" + "$ref": "#/definitions/v1PluginSpecificationsParameter" } } - } + }, + "description": "Deprecated: use ConnectorPluginSpecifications instead." 
+ }, + "v1PluginSpecificationsParameter": { + "type": "object", + "properties": { + "description": { + "type": "string" + }, + "default": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/v1PluginSpecificationsParameterType" + }, + "validations": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/PluginSpecificationsParameterValidation" + } + } + }, + "description": "Deprecated: use config.v1.Parameter instead." + }, + "v1PluginSpecificationsParameterType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "TYPE_STRING", + "TYPE_INT", + "TYPE_FLOAT", + "TYPE_BOOL", + "TYPE_FILE", + "TYPE_DURATION" + ], + "default": "TYPE_UNSPECIFIED", + "description": "Deprecated: use config.v1.Parameter.Type instead.\n\n - TYPE_STRING: Parameter is a string.\n - TYPE_INT: Parameter is an integer.\n - TYPE_FLOAT: Parameter is a float.\n - TYPE_BOOL: Parameter is a boolean.\n - TYPE_FILE: Parameter is a file.\n - TYPE_DURATION: Parameter is a duration." }, "v1Processor": { "type": "object", @@ -2051,16 +2273,16 @@ "config": { "$ref": "#/definitions/v1ProcessorConfig" }, - "type": { + "condition": { + "type": "string", + "title": "Condition is a goTemplate formatted string, the value provided to the template is a sdk.Record, it should evaluate\nto a boolean value, indicating a condition to run the processor for a specific record or not. (template functions\nprovided by `sprig` are injected)" + }, + "plugin": { "type": "string" }, "parent": { "$ref": "#/definitions/ProcessorParent" }, - "condition": { - "type": "string", - "title": "Condition is a goTemplate formatted string, the value provided to the template is a sdk.Record, it should evaluate\nto a boolean value, indicating a condition to run the processor for a specific record or not. 
(template functions\nprovided by `sprig` are injected)" - }, "createdAt": { "type": "string", "format": "date-time" @@ -2086,6 +2308,39 @@ } } }, + "v1ProcessorPluginSpecifications": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name is the name of the plugin." + }, + "summary": { + "type": "string", + "description": "Summary is a brief description of the plugin and what it does,\nideally not longer than one sentence." + }, + "description": { + "type": "string", + "description": "Description is a longer form field, appropriate for README-like\ntext that the author can provide for documentation about the\nusage of the plugin." + }, + "version": { + "type": "string", + "description": "Version string. Should follow semantic versioning and use the \"v\"\nprefix (e.g. v1.23.4)." + }, + "author": { + "type": "string", + "description": "Author declares the entity that created or maintains this plugin." + }, + "parameters": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/configv1Parameter" + }, + "description": "A map that describes parameters available for configuring the\nprocessor plugin." + } + }, + "description": "ProcessorPluginSpecifications describes the specifications of a processor plugin." 
+ }, "v1Record": { "type": "object", "properties": { diff --git a/proto/api/v1/api.pb.go b/proto/api/v1/api.pb.go index 14a6ace38..65c9a3f4b 100644 --- a/proto/api/v1/api.pb.go +++ b/proto/api/v1/api.pb.go @@ -8,7 +8,8 @@ package apiv1 import ( _ "buf.build/gen/go/grpc-ecosystem/grpc-gateway/protocolbuffers/go/protoc-gen-openapiv2/options" - v1 "github.com/conduitio/conduit-commons/proto/opencdc/v1" + v1 "github.com/conduitio/conduit-commons/proto/config/v1" + v11 "github.com/conduitio/conduit-commons/proto/opencdc/v1" _ "google.golang.org/genproto/googleapis/api/annotations" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" @@ -184,7 +185,9 @@ func (Processor_Parent_Type) EnumDescriptor() ([]byte, []int) { return file_api_v1_api_proto_rawDescGZIP(), []int{2, 0, 0} } -// Type shows the parameter type. +// Deprecated: use config.v1.Parameter.Type instead. +// +// Deprecated: Marked as deprecated in api/v1/api.proto. type PluginSpecifications_Parameter_Type int32 const ( @@ -249,9 +252,12 @@ func (x PluginSpecifications_Parameter_Type) Number() protoreflect.EnumNumber { // Deprecated: Use PluginSpecifications_Parameter_Type.Descriptor instead. func (PluginSpecifications_Parameter_Type) EnumDescriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{58, 0, 0} + return file_api_v1_api_proto_rawDescGZIP(), []int{5, 0, 0} } +// Deprecated: use config.v1.Validation.Type instead. +// +// Deprecated: Marked as deprecated in api/v1/api.proto. type PluginSpecifications_Parameter_Validation_Type int32 const ( @@ -316,7 +322,7 @@ func (x PluginSpecifications_Parameter_Validation_Type) Number() protoreflect.En // Deprecated: Use PluginSpecifications_Parameter_Validation_Type.Descriptor instead. 
func (PluginSpecifications_Parameter_Validation_Type) EnumDescriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{58, 0, 0, 0} + return file_api_v1_api_proto_rawDescGZIP(), []int{5, 0, 0, 0} } type Pipeline struct { @@ -566,12 +572,12 @@ type Processor struct { Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` Config *Processor_Config `protobuf:"bytes,3,opt,name=config,proto3" json:"config,omitempty"` - Type string `protobuf:"bytes,4,opt,name=type,proto3" json:"type,omitempty"` - Parent *Processor_Parent `protobuf:"bytes,6,opt,name=parent,proto3" json:"parent,omitempty"` // Condition is a goTemplate formatted string, the value provided to the template is a sdk.Record, it should evaluate // to a boolean value, indicating a condition to run the processor for a specific record or not. (template functions // provided by `sprig` are injected) Condition string `protobuf:"bytes,9,opt,name=condition,proto3" json:"condition,omitempty"` + Plugin string `protobuf:"bytes,5,opt,name=plugin,proto3" json:"plugin,omitempty"` + Parent *Processor_Parent `protobuf:"bytes,6,opt,name=parent,proto3" json:"parent,omitempty"` CreatedAt *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` UpdatedAt *timestamppb.Timestamp `protobuf:"bytes,8,opt,name=updated_at,json=updatedAt,proto3" json:"updated_at,omitempty"` } @@ -622,25 +628,25 @@ func (x *Processor) GetConfig() *Processor_Config { return nil } -func (x *Processor) GetType() string { +func (x *Processor) GetCondition() string { if x != nil { - return x.Type + return x.Condition } return "" } -func (x *Processor) GetParent() *Processor_Parent { +func (x *Processor) GetPlugin() string { if x != nil { - return x.Parent + return x.Plugin } - return nil + return "" } -func (x *Processor) GetCondition() string { +func (x *Processor) GetParent() *Processor_Parent { if x != nil { - return x.Condition + return x.Parent } - return "" + 
return nil } func (x *Processor) GetCreatedAt() *timestamppb.Timestamp { @@ -657,6 +663,312 @@ func (x *Processor) GetUpdatedAt() *timestamppb.Timestamp { return nil } +// ConnectorPluginSpecifications describes the specifications of a connector plugin. +type ConnectorPluginSpecifications struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Name is the name of the plugin. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Summary is a brief description of the plugin and what it does, + // ideally not longer than one sentence. + Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"` + // Description is a longer form field, appropriate for README-like + // text that the author can provide for documentation about the + // usage of the plugin. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Version string. Should follow semantic versioning and use the "v" + // prefix (e.g. v1.23.4). + Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` + // Author declares the entity that created or maintains this plugin. + Author string `protobuf:"bytes,5,opt,name=author,proto3" json:"author,omitempty"` + // A map that describes parameters available for configuring the + // destination plugin. + DestinationParams map[string]*v1.Parameter `protobuf:"bytes,6,rep,name=destination_params,json=destinationParams,proto3" json:"destination_params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // A map that describes parameters available for configuring the + // source plugin. 
+ SourceParams map[string]*v1.Parameter `protobuf:"bytes,7,rep,name=source_params,json=sourceParams,proto3" json:"source_params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *ConnectorPluginSpecifications) Reset() { + *x = ConnectorPluginSpecifications{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_api_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ConnectorPluginSpecifications) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ConnectorPluginSpecifications) ProtoMessage() {} + +func (x *ConnectorPluginSpecifications) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ConnectorPluginSpecifications.ProtoReflect.Descriptor instead. 
+func (*ConnectorPluginSpecifications) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{3} +} + +func (x *ConnectorPluginSpecifications) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ConnectorPluginSpecifications) GetSummary() string { + if x != nil { + return x.Summary + } + return "" +} + +func (x *ConnectorPluginSpecifications) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *ConnectorPluginSpecifications) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *ConnectorPluginSpecifications) GetAuthor() string { + if x != nil { + return x.Author + } + return "" +} + +func (x *ConnectorPluginSpecifications) GetDestinationParams() map[string]*v1.Parameter { + if x != nil { + return x.DestinationParams + } + return nil +} + +func (x *ConnectorPluginSpecifications) GetSourceParams() map[string]*v1.Parameter { + if x != nil { + return x.SourceParams + } + return nil +} + +// ProcessorPluginSpecifications describes the specifications of a processor plugin. +type ProcessorPluginSpecifications struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Name is the name of the plugin. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Summary is a brief description of the plugin and what it does, + // ideally not longer than one sentence. + Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"` + // Description is a longer form field, appropriate for README-like + // text that the author can provide for documentation about the + // usage of the plugin. + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + // Version string. Should follow semantic versioning and use the "v" + // prefix (e.g. v1.23.4). 
+ Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` + // Author declares the entity that created or maintains this plugin. + Author string `protobuf:"bytes,5,opt,name=author,proto3" json:"author,omitempty"` + // A map that describes parameters available for configuring the + // processor plugin. + Parameters map[string]*v1.Parameter `protobuf:"bytes,6,rep,name=parameters,proto3" json:"parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *ProcessorPluginSpecifications) Reset() { + *x = ProcessorPluginSpecifications{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_api_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ProcessorPluginSpecifications) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ProcessorPluginSpecifications) ProtoMessage() {} + +func (x *ProcessorPluginSpecifications) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ProcessorPluginSpecifications.ProtoReflect.Descriptor instead. 
+func (*ProcessorPluginSpecifications) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{4} +} + +func (x *ProcessorPluginSpecifications) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ProcessorPluginSpecifications) GetSummary() string { + if x != nil { + return x.Summary + } + return "" +} + +func (x *ProcessorPluginSpecifications) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *ProcessorPluginSpecifications) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *ProcessorPluginSpecifications) GetAuthor() string { + if x != nil { + return x.Author + } + return "" +} + +func (x *ProcessorPluginSpecifications) GetParameters() map[string]*v1.Parameter { + if x != nil { + return x.Parameters + } + return nil +} + +// Deprecated: use ConnectorPluginSpecifications instead. +// +// Deprecated: Marked as deprecated in api/v1/api.proto. +type PluginSpecifications struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` + Author string `protobuf:"bytes,5,opt,name=author,proto3" json:"author,omitempty"` + DestinationParams map[string]*PluginSpecifications_Parameter `protobuf:"bytes,6,rep,name=destination_params,json=destinationParams,proto3" json:"destination_params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + SourceParams map[string]*PluginSpecifications_Parameter `protobuf:"bytes,7,rep,name=source_params,json=sourceParams,proto3" json:"source_params,omitempty" 
protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *PluginSpecifications) Reset() { + *x = PluginSpecifications{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_api_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PluginSpecifications) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PluginSpecifications) ProtoMessage() {} + +func (x *PluginSpecifications) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PluginSpecifications.ProtoReflect.Descriptor instead. +func (*PluginSpecifications) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{5} +} + +func (x *PluginSpecifications) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *PluginSpecifications) GetSummary() string { + if x != nil { + return x.Summary + } + return "" +} + +func (x *PluginSpecifications) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *PluginSpecifications) GetVersion() string { + if x != nil { + return x.Version + } + return "" +} + +func (x *PluginSpecifications) GetAuthor() string { + if x != nil { + return x.Author + } + return "" +} + +func (x *PluginSpecifications) GetDestinationParams() map[string]*PluginSpecifications_Parameter { + if x != nil { + return x.DestinationParams + } + return nil +} + +func (x *PluginSpecifications) GetSourceParams() map[string]*PluginSpecifications_Parameter { + if x != nil { + return x.SourceParams + } + return nil +} + type ListPipelinesRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -669,7 +981,7 @@ type 
ListPipelinesRequest struct { func (x *ListPipelinesRequest) Reset() { *x = ListPipelinesRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[3] + mi := &file_api_v1_api_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -682,7 +994,7 @@ func (x *ListPipelinesRequest) String() string { func (*ListPipelinesRequest) ProtoMessage() {} func (x *ListPipelinesRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[3] + mi := &file_api_v1_api_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -695,7 +1007,7 @@ func (x *ListPipelinesRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ListPipelinesRequest.ProtoReflect.Descriptor instead. func (*ListPipelinesRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{3} + return file_api_v1_api_proto_rawDescGZIP(), []int{6} } func (x *ListPipelinesRequest) GetName() string { @@ -716,7 +1028,7 @@ type ListPipelinesResponse struct { func (x *ListPipelinesResponse) Reset() { *x = ListPipelinesResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[4] + mi := &file_api_v1_api_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -729,7 +1041,7 @@ func (x *ListPipelinesResponse) String() string { func (*ListPipelinesResponse) ProtoMessage() {} func (x *ListPipelinesResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[4] + mi := &file_api_v1_api_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -742,7 +1054,7 @@ func (x *ListPipelinesResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ListPipelinesResponse.ProtoReflect.Descriptor instead. 
func (*ListPipelinesResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{4} + return file_api_v1_api_proto_rawDescGZIP(), []int{7} } func (x *ListPipelinesResponse) GetPipelines() []*Pipeline { @@ -763,7 +1075,7 @@ type CreatePipelineRequest struct { func (x *CreatePipelineRequest) Reset() { *x = CreatePipelineRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[5] + mi := &file_api_v1_api_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -776,7 +1088,7 @@ func (x *CreatePipelineRequest) String() string { func (*CreatePipelineRequest) ProtoMessage() {} func (x *CreatePipelineRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[5] + mi := &file_api_v1_api_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -789,7 +1101,7 @@ func (x *CreatePipelineRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use CreatePipelineRequest.ProtoReflect.Descriptor instead. 
func (*CreatePipelineRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{5} + return file_api_v1_api_proto_rawDescGZIP(), []int{8} } func (x *CreatePipelineRequest) GetConfig() *Pipeline_Config { @@ -810,7 +1122,7 @@ type CreatePipelineResponse struct { func (x *CreatePipelineResponse) Reset() { *x = CreatePipelineResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[6] + mi := &file_api_v1_api_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -823,7 +1135,7 @@ func (x *CreatePipelineResponse) String() string { func (*CreatePipelineResponse) ProtoMessage() {} func (x *CreatePipelineResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[6] + mi := &file_api_v1_api_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -836,7 +1148,7 @@ func (x *CreatePipelineResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use CreatePipelineResponse.ProtoReflect.Descriptor instead. 
func (*CreatePipelineResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{6} + return file_api_v1_api_proto_rawDescGZIP(), []int{9} } func (x *CreatePipelineResponse) GetPipeline() *Pipeline { @@ -857,7 +1169,7 @@ type GetPipelineRequest struct { func (x *GetPipelineRequest) Reset() { *x = GetPipelineRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[7] + mi := &file_api_v1_api_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -870,7 +1182,7 @@ func (x *GetPipelineRequest) String() string { func (*GetPipelineRequest) ProtoMessage() {} func (x *GetPipelineRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[7] + mi := &file_api_v1_api_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -883,7 +1195,7 @@ func (x *GetPipelineRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetPipelineRequest.ProtoReflect.Descriptor instead. 
func (*GetPipelineRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{7} + return file_api_v1_api_proto_rawDescGZIP(), []int{10} } func (x *GetPipelineRequest) GetId() string { @@ -904,7 +1216,7 @@ type GetPipelineResponse struct { func (x *GetPipelineResponse) Reset() { *x = GetPipelineResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[8] + mi := &file_api_v1_api_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -917,7 +1229,7 @@ func (x *GetPipelineResponse) String() string { func (*GetPipelineResponse) ProtoMessage() {} func (x *GetPipelineResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[8] + mi := &file_api_v1_api_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -930,7 +1242,7 @@ func (x *GetPipelineResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetPipelineResponse.ProtoReflect.Descriptor instead. 
func (*GetPipelineResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{8} + return file_api_v1_api_proto_rawDescGZIP(), []int{11} } func (x *GetPipelineResponse) GetPipeline() *Pipeline { @@ -952,7 +1264,7 @@ type UpdatePipelineRequest struct { func (x *UpdatePipelineRequest) Reset() { *x = UpdatePipelineRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[9] + mi := &file_api_v1_api_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -965,7 +1277,7 @@ func (x *UpdatePipelineRequest) String() string { func (*UpdatePipelineRequest) ProtoMessage() {} func (x *UpdatePipelineRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[9] + mi := &file_api_v1_api_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -978,7 +1290,7 @@ func (x *UpdatePipelineRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdatePipelineRequest.ProtoReflect.Descriptor instead. 
func (*UpdatePipelineRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{9} + return file_api_v1_api_proto_rawDescGZIP(), []int{12} } func (x *UpdatePipelineRequest) GetId() string { @@ -1006,7 +1318,7 @@ type UpdatePipelineResponse struct { func (x *UpdatePipelineResponse) Reset() { *x = UpdatePipelineResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[10] + mi := &file_api_v1_api_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1019,7 +1331,7 @@ func (x *UpdatePipelineResponse) String() string { func (*UpdatePipelineResponse) ProtoMessage() {} func (x *UpdatePipelineResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[10] + mi := &file_api_v1_api_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1032,7 +1344,7 @@ func (x *UpdatePipelineResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdatePipelineResponse.ProtoReflect.Descriptor instead. 
func (*UpdatePipelineResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{10} + return file_api_v1_api_proto_rawDescGZIP(), []int{13} } func (x *UpdatePipelineResponse) GetPipeline() *Pipeline { @@ -1053,7 +1365,7 @@ type DeletePipelineRequest struct { func (x *DeletePipelineRequest) Reset() { *x = DeletePipelineRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[11] + mi := &file_api_v1_api_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1066,7 +1378,7 @@ func (x *DeletePipelineRequest) String() string { func (*DeletePipelineRequest) ProtoMessage() {} func (x *DeletePipelineRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[11] + mi := &file_api_v1_api_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1079,7 +1391,7 @@ func (x *DeletePipelineRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use DeletePipelineRequest.ProtoReflect.Descriptor instead. 
func (*DeletePipelineRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{11} + return file_api_v1_api_proto_rawDescGZIP(), []int{14} } func (x *DeletePipelineRequest) GetId() string { @@ -1098,7 +1410,7 @@ type DeletePipelineResponse struct { func (x *DeletePipelineResponse) Reset() { *x = DeletePipelineResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[12] + mi := &file_api_v1_api_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1111,7 +1423,7 @@ func (x *DeletePipelineResponse) String() string { func (*DeletePipelineResponse) ProtoMessage() {} func (x *DeletePipelineResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[12] + mi := &file_api_v1_api_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1124,7 +1436,7 @@ func (x *DeletePipelineResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use DeletePipelineResponse.ProtoReflect.Descriptor instead. 
func (*DeletePipelineResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{12} + return file_api_v1_api_proto_rawDescGZIP(), []int{15} } type StartPipelineRequest struct { @@ -1138,7 +1450,7 @@ type StartPipelineRequest struct { func (x *StartPipelineRequest) Reset() { *x = StartPipelineRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[13] + mi := &file_api_v1_api_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1151,7 +1463,7 @@ func (x *StartPipelineRequest) String() string { func (*StartPipelineRequest) ProtoMessage() {} func (x *StartPipelineRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[13] + mi := &file_api_v1_api_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1164,7 +1476,7 @@ func (x *StartPipelineRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use StartPipelineRequest.ProtoReflect.Descriptor instead. 
func (*StartPipelineRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{13} + return file_api_v1_api_proto_rawDescGZIP(), []int{16} } func (x *StartPipelineRequest) GetId() string { @@ -1183,7 +1495,7 @@ type StartPipelineResponse struct { func (x *StartPipelineResponse) Reset() { *x = StartPipelineResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[14] + mi := &file_api_v1_api_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1196,7 +1508,7 @@ func (x *StartPipelineResponse) String() string { func (*StartPipelineResponse) ProtoMessage() {} func (x *StartPipelineResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[14] + mi := &file_api_v1_api_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1209,7 +1521,7 @@ func (x *StartPipelineResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use StartPipelineResponse.ProtoReflect.Descriptor instead. 
func (*StartPipelineResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{14} + return file_api_v1_api_proto_rawDescGZIP(), []int{17} } type StopPipelineRequest struct { @@ -1224,7 +1536,7 @@ type StopPipelineRequest struct { func (x *StopPipelineRequest) Reset() { *x = StopPipelineRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[15] + mi := &file_api_v1_api_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1237,7 +1549,7 @@ func (x *StopPipelineRequest) String() string { func (*StopPipelineRequest) ProtoMessage() {} func (x *StopPipelineRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[15] + mi := &file_api_v1_api_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1250,7 +1562,7 @@ func (x *StopPipelineRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use StopPipelineRequest.ProtoReflect.Descriptor instead. 
func (*StopPipelineRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{15} + return file_api_v1_api_proto_rawDescGZIP(), []int{18} } func (x *StopPipelineRequest) GetId() string { @@ -1276,7 +1588,7 @@ type StopPipelineResponse struct { func (x *StopPipelineResponse) Reset() { *x = StopPipelineResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[16] + mi := &file_api_v1_api_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1289,7 +1601,7 @@ func (x *StopPipelineResponse) String() string { func (*StopPipelineResponse) ProtoMessage() {} func (x *StopPipelineResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[16] + mi := &file_api_v1_api_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1302,7 +1614,7 @@ func (x *StopPipelineResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use StopPipelineResponse.ProtoReflect.Descriptor instead. 
func (*StopPipelineResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{16} + return file_api_v1_api_proto_rawDescGZIP(), []int{19} } type GetDLQRequest struct { @@ -1316,7 +1628,7 @@ type GetDLQRequest struct { func (x *GetDLQRequest) Reset() { *x = GetDLQRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[17] + mi := &file_api_v1_api_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1329,7 +1641,7 @@ func (x *GetDLQRequest) String() string { func (*GetDLQRequest) ProtoMessage() {} func (x *GetDLQRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[17] + mi := &file_api_v1_api_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1342,7 +1654,7 @@ func (x *GetDLQRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetDLQRequest.ProtoReflect.Descriptor instead. 
func (*GetDLQRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{17} + return file_api_v1_api_proto_rawDescGZIP(), []int{20} } func (x *GetDLQRequest) GetId() string { @@ -1363,7 +1675,7 @@ type GetDLQResponse struct { func (x *GetDLQResponse) Reset() { *x = GetDLQResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[18] + mi := &file_api_v1_api_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1376,7 +1688,7 @@ func (x *GetDLQResponse) String() string { func (*GetDLQResponse) ProtoMessage() {} func (x *GetDLQResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[18] + mi := &file_api_v1_api_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1389,7 +1701,7 @@ func (x *GetDLQResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetDLQResponse.ProtoReflect.Descriptor instead. 
func (*GetDLQResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{18} + return file_api_v1_api_proto_rawDescGZIP(), []int{21} } func (x *GetDLQResponse) GetDlq() *Pipeline_DLQ { @@ -1411,7 +1723,7 @@ type UpdateDLQRequest struct { func (x *UpdateDLQRequest) Reset() { *x = UpdateDLQRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[19] + mi := &file_api_v1_api_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1424,7 +1736,7 @@ func (x *UpdateDLQRequest) String() string { func (*UpdateDLQRequest) ProtoMessage() {} func (x *UpdateDLQRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[19] + mi := &file_api_v1_api_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1437,7 +1749,7 @@ func (x *UpdateDLQRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateDLQRequest.ProtoReflect.Descriptor instead. 
func (*UpdateDLQRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{19} + return file_api_v1_api_proto_rawDescGZIP(), []int{22} } func (x *UpdateDLQRequest) GetId() string { @@ -1465,7 +1777,7 @@ type UpdateDLQResponse struct { func (x *UpdateDLQResponse) Reset() { *x = UpdateDLQResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[20] + mi := &file_api_v1_api_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1478,7 +1790,7 @@ func (x *UpdateDLQResponse) String() string { func (*UpdateDLQResponse) ProtoMessage() {} func (x *UpdateDLQResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[20] + mi := &file_api_v1_api_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1491,7 +1803,7 @@ func (x *UpdateDLQResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateDLQResponse.ProtoReflect.Descriptor instead. 
func (*UpdateDLQResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{20} + return file_api_v1_api_proto_rawDescGZIP(), []int{23} } func (x *UpdateDLQResponse) GetDlq() *Pipeline_DLQ { @@ -1512,7 +1824,7 @@ type ExportPipelineRequest struct { func (x *ExportPipelineRequest) Reset() { *x = ExportPipelineRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[21] + mi := &file_api_v1_api_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1525,7 +1837,7 @@ func (x *ExportPipelineRequest) String() string { func (*ExportPipelineRequest) ProtoMessage() {} func (x *ExportPipelineRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[21] + mi := &file_api_v1_api_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1538,7 +1850,7 @@ func (x *ExportPipelineRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ExportPipelineRequest.ProtoReflect.Descriptor instead. 
func (*ExportPipelineRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{21} + return file_api_v1_api_proto_rawDescGZIP(), []int{24} } func (x *ExportPipelineRequest) GetId() string { @@ -1559,7 +1871,7 @@ type ExportPipelineResponse struct { func (x *ExportPipelineResponse) Reset() { *x = ExportPipelineResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[22] + mi := &file_api_v1_api_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1572,7 +1884,7 @@ func (x *ExportPipelineResponse) String() string { func (*ExportPipelineResponse) ProtoMessage() {} func (x *ExportPipelineResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[22] + mi := &file_api_v1_api_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1585,7 +1897,7 @@ func (x *ExportPipelineResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ExportPipelineResponse.ProtoReflect.Descriptor instead. 
func (*ExportPipelineResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{22} + return file_api_v1_api_proto_rawDescGZIP(), []int{25} } func (x *ExportPipelineResponse) GetPipeline() *Pipeline { @@ -1606,7 +1918,7 @@ type ImportPipelineRequest struct { func (x *ImportPipelineRequest) Reset() { *x = ImportPipelineRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[23] + mi := &file_api_v1_api_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1619,7 +1931,7 @@ func (x *ImportPipelineRequest) String() string { func (*ImportPipelineRequest) ProtoMessage() {} func (x *ImportPipelineRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[23] + mi := &file_api_v1_api_proto_msgTypes[26] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1632,7 +1944,7 @@ func (x *ImportPipelineRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ImportPipelineRequest.ProtoReflect.Descriptor instead. 
func (*ImportPipelineRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{23} + return file_api_v1_api_proto_rawDescGZIP(), []int{26} } func (x *ImportPipelineRequest) GetPipeline() *Pipeline { @@ -1653,7 +1965,7 @@ type ImportPipelineResponse struct { func (x *ImportPipelineResponse) Reset() { *x = ImportPipelineResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[24] + mi := &file_api_v1_api_proto_msgTypes[27] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1666,7 +1978,7 @@ func (x *ImportPipelineResponse) String() string { func (*ImportPipelineResponse) ProtoMessage() {} func (x *ImportPipelineResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[24] + mi := &file_api_v1_api_proto_msgTypes[27] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1679,7 +1991,7 @@ func (x *ImportPipelineResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ImportPipelineResponse.ProtoReflect.Descriptor instead. func (*ImportPipelineResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{24} + return file_api_v1_api_proto_rawDescGZIP(), []int{27} } func (x *ImportPipelineResponse) GetPipeline() *Pipeline { @@ -1695,7 +2007,12 @@ type CreateConnectorRequest struct { unknownFields protoimpl.UnknownFields Type Connector_Type `protobuf:"varint,1,opt,name=type,proto3,enum=api.v1.Connector_Type" json:"type,omitempty"` - // Plugin name is the name of the builtin plugin, or the absolute path of a standalone plugin. + // Used to reference a plugin. Its format is as follows: + // [PLUGIN-TYPE:]PLUGIN-NAME[@VERSION] + // PLUGIN-TYPE: One of: builtin, standalone or any (default). + // PLUGIN-NAME: The name of the plugin as specified in the plugin specifications. 
+ // VERSION: The plugin version as specified in the plugin specifications or latest (default). + // For more information, see: https://conduit.io/docs/connectors/referencing/ Plugin string `protobuf:"bytes,2,opt,name=plugin,proto3" json:"plugin,omitempty"` // ID of the pipeline to which the connector will get attached. PipelineId string `protobuf:"bytes,3,opt,name=pipeline_id,json=pipelineId,proto3" json:"pipeline_id,omitempty"` @@ -1705,7 +2022,7 @@ type CreateConnectorRequest struct { func (x *CreateConnectorRequest) Reset() { *x = CreateConnectorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[25] + mi := &file_api_v1_api_proto_msgTypes[28] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1718,7 +2035,7 @@ func (x *CreateConnectorRequest) String() string { func (*CreateConnectorRequest) ProtoMessage() {} func (x *CreateConnectorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[25] + mi := &file_api_v1_api_proto_msgTypes[28] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1731,7 +2048,7 @@ func (x *CreateConnectorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use CreateConnectorRequest.ProtoReflect.Descriptor instead. 
func (*CreateConnectorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{25} + return file_api_v1_api_proto_rawDescGZIP(), []int{28} } func (x *CreateConnectorRequest) GetType() Connector_Type { @@ -1773,7 +2090,7 @@ type CreateConnectorResponse struct { func (x *CreateConnectorResponse) Reset() { *x = CreateConnectorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[26] + mi := &file_api_v1_api_proto_msgTypes[29] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1786,7 +2103,7 @@ func (x *CreateConnectorResponse) String() string { func (*CreateConnectorResponse) ProtoMessage() {} func (x *CreateConnectorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[26] + mi := &file_api_v1_api_proto_msgTypes[29] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1799,7 +2116,7 @@ func (x *CreateConnectorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use CreateConnectorResponse.ProtoReflect.Descriptor instead. 
func (*CreateConnectorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{26} + return file_api_v1_api_proto_rawDescGZIP(), []int{29} } func (x *CreateConnectorResponse) GetConnector() *Connector { @@ -1824,7 +2141,7 @@ type ValidateConnectorRequest struct { func (x *ValidateConnectorRequest) Reset() { *x = ValidateConnectorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[27] + mi := &file_api_v1_api_proto_msgTypes[30] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1837,7 +2154,7 @@ func (x *ValidateConnectorRequest) String() string { func (*ValidateConnectorRequest) ProtoMessage() {} func (x *ValidateConnectorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[27] + mi := &file_api_v1_api_proto_msgTypes[30] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1850,7 +2167,7 @@ func (x *ValidateConnectorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ValidateConnectorRequest.ProtoReflect.Descriptor instead. 
func (*ValidateConnectorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{27} + return file_api_v1_api_proto_rawDescGZIP(), []int{30} } func (x *ValidateConnectorRequest) GetType() Connector_Type { @@ -1883,7 +2200,7 @@ type ValidateConnectorResponse struct { func (x *ValidateConnectorResponse) Reset() { *x = ValidateConnectorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[28] + mi := &file_api_v1_api_proto_msgTypes[31] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1896,7 +2213,7 @@ func (x *ValidateConnectorResponse) String() string { func (*ValidateConnectorResponse) ProtoMessage() {} func (x *ValidateConnectorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[28] + mi := &file_api_v1_api_proto_msgTypes[31] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1909,7 +2226,7 @@ func (x *ValidateConnectorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ValidateConnectorResponse.ProtoReflect.Descriptor instead. 
func (*ValidateConnectorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{28} + return file_api_v1_api_proto_rawDescGZIP(), []int{31} } type ListConnectorsRequest struct { @@ -1923,7 +2240,7 @@ type ListConnectorsRequest struct { func (x *ListConnectorsRequest) Reset() { *x = ListConnectorsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[29] + mi := &file_api_v1_api_proto_msgTypes[32] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1936,7 +2253,7 @@ func (x *ListConnectorsRequest) String() string { func (*ListConnectorsRequest) ProtoMessage() {} func (x *ListConnectorsRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[29] + mi := &file_api_v1_api_proto_msgTypes[32] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1949,7 +2266,7 @@ func (x *ListConnectorsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ListConnectorsRequest.ProtoReflect.Descriptor instead. 
func (*ListConnectorsRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{29} + return file_api_v1_api_proto_rawDescGZIP(), []int{32} } func (x *ListConnectorsRequest) GetPipelineId() string { @@ -1970,7 +2287,7 @@ type ListConnectorsResponse struct { func (x *ListConnectorsResponse) Reset() { *x = ListConnectorsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[30] + mi := &file_api_v1_api_proto_msgTypes[33] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1983,7 +2300,7 @@ func (x *ListConnectorsResponse) String() string { func (*ListConnectorsResponse) ProtoMessage() {} func (x *ListConnectorsResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[30] + mi := &file_api_v1_api_proto_msgTypes[33] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1996,7 +2313,7 @@ func (x *ListConnectorsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ListConnectorsResponse.ProtoReflect.Descriptor instead. 
func (*ListConnectorsResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{30} + return file_api_v1_api_proto_rawDescGZIP(), []int{33} } func (x *ListConnectorsResponse) GetConnectors() []*Connector { @@ -2017,7 +2334,7 @@ type InspectConnectorRequest struct { func (x *InspectConnectorRequest) Reset() { *x = InspectConnectorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[31] + mi := &file_api_v1_api_proto_msgTypes[34] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2030,7 +2347,7 @@ func (x *InspectConnectorRequest) String() string { func (*InspectConnectorRequest) ProtoMessage() {} func (x *InspectConnectorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[31] + mi := &file_api_v1_api_proto_msgTypes[34] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2043,7 +2360,7 @@ func (x *InspectConnectorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use InspectConnectorRequest.ProtoReflect.Descriptor instead. 
func (*InspectConnectorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{31} + return file_api_v1_api_proto_rawDescGZIP(), []int{34} } func (x *InspectConnectorRequest) GetId() string { @@ -2058,13 +2375,13 @@ type InspectConnectorResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Record *v1.Record `protobuf:"bytes,1,opt,name=record,proto3" json:"record,omitempty"` + Record *v11.Record `protobuf:"bytes,1,opt,name=record,proto3" json:"record,omitempty"` } func (x *InspectConnectorResponse) Reset() { *x = InspectConnectorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[32] + mi := &file_api_v1_api_proto_msgTypes[35] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2077,7 +2394,7 @@ func (x *InspectConnectorResponse) String() string { func (*InspectConnectorResponse) ProtoMessage() {} func (x *InspectConnectorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[32] + mi := &file_api_v1_api_proto_msgTypes[35] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2090,10 +2407,10 @@ func (x *InspectConnectorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use InspectConnectorResponse.ProtoReflect.Descriptor instead. 
func (*InspectConnectorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{32} + return file_api_v1_api_proto_rawDescGZIP(), []int{35} } -func (x *InspectConnectorResponse) GetRecord() *v1.Record { +func (x *InspectConnectorResponse) GetRecord() *v11.Record { if x != nil { return x.Record } @@ -2111,7 +2428,7 @@ type GetConnectorRequest struct { func (x *GetConnectorRequest) Reset() { *x = GetConnectorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[33] + mi := &file_api_v1_api_proto_msgTypes[36] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2124,7 +2441,7 @@ func (x *GetConnectorRequest) String() string { func (*GetConnectorRequest) ProtoMessage() {} func (x *GetConnectorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[33] + mi := &file_api_v1_api_proto_msgTypes[36] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2137,7 +2454,7 @@ func (x *GetConnectorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetConnectorRequest.ProtoReflect.Descriptor instead. 
func (*GetConnectorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{33} + return file_api_v1_api_proto_rawDescGZIP(), []int{36} } func (x *GetConnectorRequest) GetId() string { @@ -2158,7 +2475,7 @@ type GetConnectorResponse struct { func (x *GetConnectorResponse) Reset() { *x = GetConnectorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[34] + mi := &file_api_v1_api_proto_msgTypes[37] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2171,7 +2488,7 @@ func (x *GetConnectorResponse) String() string { func (*GetConnectorResponse) ProtoMessage() {} func (x *GetConnectorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[34] + mi := &file_api_v1_api_proto_msgTypes[37] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2184,7 +2501,7 @@ func (x *GetConnectorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetConnectorResponse.ProtoReflect.Descriptor instead. 
func (*GetConnectorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{34} + return file_api_v1_api_proto_rawDescGZIP(), []int{37} } func (x *GetConnectorResponse) GetConnector() *Connector { @@ -2206,7 +2523,7 @@ type UpdateConnectorRequest struct { func (x *UpdateConnectorRequest) Reset() { *x = UpdateConnectorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[35] + mi := &file_api_v1_api_proto_msgTypes[38] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2219,7 +2536,7 @@ func (x *UpdateConnectorRequest) String() string { func (*UpdateConnectorRequest) ProtoMessage() {} func (x *UpdateConnectorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[35] + mi := &file_api_v1_api_proto_msgTypes[38] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2232,7 +2549,7 @@ func (x *UpdateConnectorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateConnectorRequest.ProtoReflect.Descriptor instead. 
func (*UpdateConnectorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{35} + return file_api_v1_api_proto_rawDescGZIP(), []int{38} } func (x *UpdateConnectorRequest) GetId() string { @@ -2260,7 +2577,7 @@ type UpdateConnectorResponse struct { func (x *UpdateConnectorResponse) Reset() { *x = UpdateConnectorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[36] + mi := &file_api_v1_api_proto_msgTypes[39] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2273,7 +2590,7 @@ func (x *UpdateConnectorResponse) String() string { func (*UpdateConnectorResponse) ProtoMessage() {} func (x *UpdateConnectorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[36] + mi := &file_api_v1_api_proto_msgTypes[39] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2286,7 +2603,7 @@ func (x *UpdateConnectorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateConnectorResponse.ProtoReflect.Descriptor instead. 
func (*UpdateConnectorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{36} + return file_api_v1_api_proto_rawDescGZIP(), []int{39} } func (x *UpdateConnectorResponse) GetConnector() *Connector { @@ -2307,7 +2624,7 @@ type DeleteConnectorRequest struct { func (x *DeleteConnectorRequest) Reset() { *x = DeleteConnectorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[37] + mi := &file_api_v1_api_proto_msgTypes[40] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2320,7 +2637,7 @@ func (x *DeleteConnectorRequest) String() string { func (*DeleteConnectorRequest) ProtoMessage() {} func (x *DeleteConnectorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[37] + mi := &file_api_v1_api_proto_msgTypes[40] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2333,7 +2650,7 @@ func (x *DeleteConnectorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use DeleteConnectorRequest.ProtoReflect.Descriptor instead. 
func (*DeleteConnectorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{37} + return file_api_v1_api_proto_rawDescGZIP(), []int{40} } func (x *DeleteConnectorRequest) GetId() string { @@ -2352,7 +2669,7 @@ type DeleteConnectorResponse struct { func (x *DeleteConnectorResponse) Reset() { *x = DeleteConnectorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[38] + mi := &file_api_v1_api_proto_msgTypes[41] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2365,7 +2682,7 @@ func (x *DeleteConnectorResponse) String() string { func (*DeleteConnectorResponse) ProtoMessage() {} func (x *DeleteConnectorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[38] + mi := &file_api_v1_api_proto_msgTypes[41] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2378,7 +2695,102 @@ func (x *DeleteConnectorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use DeleteConnectorResponse.ProtoReflect.Descriptor instead. func (*DeleteConnectorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{38} + return file_api_v1_api_proto_rawDescGZIP(), []int{41} +} + +type ListConnectorPluginsRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Regex to filter plugins by name. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` +} + +func (x *ListConnectorPluginsRequest) Reset() { + *x = ListConnectorPluginsRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_api_proto_msgTypes[42] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListConnectorPluginsRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListConnectorPluginsRequest) ProtoMessage() {} + +func (x *ListConnectorPluginsRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[42] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListConnectorPluginsRequest.ProtoReflect.Descriptor instead. +func (*ListConnectorPluginsRequest) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{42} +} + +func (x *ListConnectorPluginsRequest) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +type ListConnectorPluginsResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Plugins []*ConnectorPluginSpecifications `protobuf:"bytes,1,rep,name=plugins,proto3" json:"plugins,omitempty"` +} + +func (x *ListConnectorPluginsResponse) Reset() { + *x = ListConnectorPluginsResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_api_proto_msgTypes[43] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ListConnectorPluginsResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ListConnectorPluginsResponse) ProtoMessage() {} + +func (x *ListConnectorPluginsResponse) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[43] + if protoimpl.UnsafeEnabled && x != nil { + 
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ListConnectorPluginsResponse.ProtoReflect.Descriptor instead. +func (*ListConnectorPluginsResponse) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{43} +} + +func (x *ListConnectorPluginsResponse) GetPlugins() []*ConnectorPluginSpecifications { + if x != nil { + return x.Plugins + } + return nil } type ListProcessorsRequest struct { @@ -2392,7 +2804,7 @@ type ListProcessorsRequest struct { func (x *ListProcessorsRequest) Reset() { *x = ListProcessorsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[39] + mi := &file_api_v1_api_proto_msgTypes[44] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2405,7 +2817,7 @@ func (x *ListProcessorsRequest) String() string { func (*ListProcessorsRequest) ProtoMessage() {} func (x *ListProcessorsRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[39] + mi := &file_api_v1_api_proto_msgTypes[44] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2418,7 +2830,7 @@ func (x *ListProcessorsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ListProcessorsRequest.ProtoReflect.Descriptor instead. 
func (*ListProcessorsRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{39} + return file_api_v1_api_proto_rawDescGZIP(), []int{44} } func (x *ListProcessorsRequest) GetParentIds() []string { @@ -2439,7 +2851,7 @@ type ListProcessorsResponse struct { func (x *ListProcessorsResponse) Reset() { *x = ListProcessorsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[40] + mi := &file_api_v1_api_proto_msgTypes[45] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2452,7 +2864,7 @@ func (x *ListProcessorsResponse) String() string { func (*ListProcessorsResponse) ProtoMessage() {} func (x *ListProcessorsResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[40] + mi := &file_api_v1_api_proto_msgTypes[45] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2465,7 +2877,7 @@ func (x *ListProcessorsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ListProcessorsResponse.ProtoReflect.Descriptor instead. 
func (*ListProcessorsResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{40} + return file_api_v1_api_proto_rawDescGZIP(), []int{45} } func (x *ListProcessorsResponse) GetProcessors() []*Processor { @@ -2486,7 +2898,7 @@ type InspectProcessorInRequest struct { func (x *InspectProcessorInRequest) Reset() { *x = InspectProcessorInRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[41] + mi := &file_api_v1_api_proto_msgTypes[46] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2499,7 +2911,7 @@ func (x *InspectProcessorInRequest) String() string { func (*InspectProcessorInRequest) ProtoMessage() {} func (x *InspectProcessorInRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[41] + mi := &file_api_v1_api_proto_msgTypes[46] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2512,7 +2924,7 @@ func (x *InspectProcessorInRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use InspectProcessorInRequest.ProtoReflect.Descriptor instead. 
func (*InspectProcessorInRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{41} + return file_api_v1_api_proto_rawDescGZIP(), []int{46} } func (x *InspectProcessorInRequest) GetId() string { @@ -2527,13 +2939,13 @@ type InspectProcessorInResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Record *v1.Record `protobuf:"bytes,1,opt,name=record,proto3" json:"record,omitempty"` + Record *v11.Record `protobuf:"bytes,1,opt,name=record,proto3" json:"record,omitempty"` } func (x *InspectProcessorInResponse) Reset() { *x = InspectProcessorInResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[42] + mi := &file_api_v1_api_proto_msgTypes[47] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2546,7 +2958,7 @@ func (x *InspectProcessorInResponse) String() string { func (*InspectProcessorInResponse) ProtoMessage() {} func (x *InspectProcessorInResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[42] + mi := &file_api_v1_api_proto_msgTypes[47] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2559,10 +2971,10 @@ func (x *InspectProcessorInResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use InspectProcessorInResponse.ProtoReflect.Descriptor instead. 
func (*InspectProcessorInResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{42} + return file_api_v1_api_proto_rawDescGZIP(), []int{47} } -func (x *InspectProcessorInResponse) GetRecord() *v1.Record { +func (x *InspectProcessorInResponse) GetRecord() *v11.Record { if x != nil { return x.Record } @@ -2580,7 +2992,7 @@ type InspectProcessorOutRequest struct { func (x *InspectProcessorOutRequest) Reset() { *x = InspectProcessorOutRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[43] + mi := &file_api_v1_api_proto_msgTypes[48] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2593,7 +3005,7 @@ func (x *InspectProcessorOutRequest) String() string { func (*InspectProcessorOutRequest) ProtoMessage() {} func (x *InspectProcessorOutRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[43] + mi := &file_api_v1_api_proto_msgTypes[48] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2606,7 +3018,7 @@ func (x *InspectProcessorOutRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use InspectProcessorOutRequest.ProtoReflect.Descriptor instead. 
func (*InspectProcessorOutRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{43} + return file_api_v1_api_proto_rawDescGZIP(), []int{48} } func (x *InspectProcessorOutRequest) GetId() string { @@ -2621,13 +3033,13 @@ type InspectProcessorOutResponse struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Record *v1.Record `protobuf:"bytes,1,opt,name=record,proto3" json:"record,omitempty"` + Record *v11.Record `protobuf:"bytes,1,opt,name=record,proto3" json:"record,omitempty"` } func (x *InspectProcessorOutResponse) Reset() { *x = InspectProcessorOutResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[44] + mi := &file_api_v1_api_proto_msgTypes[49] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2640,7 +3052,7 @@ func (x *InspectProcessorOutResponse) String() string { func (*InspectProcessorOutResponse) ProtoMessage() {} func (x *InspectProcessorOutResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[44] + mi := &file_api_v1_api_proto_msgTypes[49] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2653,10 +3065,10 @@ func (x *InspectProcessorOutResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use InspectProcessorOutResponse.ProtoReflect.Descriptor instead. func (*InspectProcessorOutResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{44} + return file_api_v1_api_proto_rawDescGZIP(), []int{49} } -func (x *InspectProcessorOutResponse) GetRecord() *v1.Record { +func (x *InspectProcessorOutResponse) GetRecord() *v11.Record { if x != nil { return x.Record } @@ -2668,16 +3080,18 @@ type CreateProcessorRequest struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields + // Deprecated: Marked as deprecated in api/v1/api.proto. 
Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"` Parent *Processor_Parent `protobuf:"bytes,3,opt,name=parent,proto3" json:"parent,omitempty"` Config *Processor_Config `protobuf:"bytes,4,opt,name=config,proto3" json:"config,omitempty"` Condition string `protobuf:"bytes,5,opt,name=condition,proto3" json:"condition,omitempty"` + Plugin string `protobuf:"bytes,6,opt,name=plugin,proto3" json:"plugin,omitempty"` } func (x *CreateProcessorRequest) Reset() { *x = CreateProcessorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[45] + mi := &file_api_v1_api_proto_msgTypes[50] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2690,7 +3104,7 @@ func (x *CreateProcessorRequest) String() string { func (*CreateProcessorRequest) ProtoMessage() {} func (x *CreateProcessorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[45] + mi := &file_api_v1_api_proto_msgTypes[50] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2703,9 +3117,10 @@ func (x *CreateProcessorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use CreateProcessorRequest.ProtoReflect.Descriptor instead. func (*CreateProcessorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{45} + return file_api_v1_api_proto_rawDescGZIP(), []int{50} } +// Deprecated: Marked as deprecated in api/v1/api.proto. 
func (x *CreateProcessorRequest) GetType() string { if x != nil { return x.Type @@ -2734,6 +3149,13 @@ func (x *CreateProcessorRequest) GetCondition() string { return "" } +func (x *CreateProcessorRequest) GetPlugin() string { + if x != nil { + return x.Plugin + } + return "" +} + type CreateProcessorResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -2745,7 +3167,7 @@ type CreateProcessorResponse struct { func (x *CreateProcessorResponse) Reset() { *x = CreateProcessorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[46] + mi := &file_api_v1_api_proto_msgTypes[51] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2758,7 +3180,7 @@ func (x *CreateProcessorResponse) String() string { func (*CreateProcessorResponse) ProtoMessage() {} func (x *CreateProcessorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[46] + mi := &file_api_v1_api_proto_msgTypes[51] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2771,7 +3193,7 @@ func (x *CreateProcessorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use CreateProcessorResponse.ProtoReflect.Descriptor instead. 
func (*CreateProcessorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{46} + return file_api_v1_api_proto_rawDescGZIP(), []int{51} } func (x *CreateProcessorResponse) GetProcessor() *Processor { @@ -2792,7 +3214,7 @@ type GetProcessorRequest struct { func (x *GetProcessorRequest) Reset() { *x = GetProcessorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[47] + mi := &file_api_v1_api_proto_msgTypes[52] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2805,7 +3227,7 @@ func (x *GetProcessorRequest) String() string { func (*GetProcessorRequest) ProtoMessage() {} func (x *GetProcessorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[47] + mi := &file_api_v1_api_proto_msgTypes[52] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2818,7 +3240,7 @@ func (x *GetProcessorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetProcessorRequest.ProtoReflect.Descriptor instead. 
func (*GetProcessorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{47} + return file_api_v1_api_proto_rawDescGZIP(), []int{52} } func (x *GetProcessorRequest) GetId() string { @@ -2839,7 +3261,7 @@ type GetProcessorResponse struct { func (x *GetProcessorResponse) Reset() { *x = GetProcessorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[48] + mi := &file_api_v1_api_proto_msgTypes[53] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2852,7 +3274,7 @@ func (x *GetProcessorResponse) String() string { func (*GetProcessorResponse) ProtoMessage() {} func (x *GetProcessorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[48] + mi := &file_api_v1_api_proto_msgTypes[53] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2865,7 +3287,7 @@ func (x *GetProcessorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetProcessorResponse.ProtoReflect.Descriptor instead. 
func (*GetProcessorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{48} + return file_api_v1_api_proto_rawDescGZIP(), []int{53} } func (x *GetProcessorResponse) GetProcessor() *Processor { @@ -2887,7 +3309,7 @@ type UpdateProcessorRequest struct { func (x *UpdateProcessorRequest) Reset() { *x = UpdateProcessorRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[49] + mi := &file_api_v1_api_proto_msgTypes[54] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2900,7 +3322,7 @@ func (x *UpdateProcessorRequest) String() string { func (*UpdateProcessorRequest) ProtoMessage() {} func (x *UpdateProcessorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[49] + mi := &file_api_v1_api_proto_msgTypes[54] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2913,7 +3335,7 @@ func (x *UpdateProcessorRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateProcessorRequest.ProtoReflect.Descriptor instead. 
func (*UpdateProcessorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{49} + return file_api_v1_api_proto_rawDescGZIP(), []int{54} } func (x *UpdateProcessorRequest) GetId() string { @@ -2941,7 +3363,7 @@ type UpdateProcessorResponse struct { func (x *UpdateProcessorResponse) Reset() { *x = UpdateProcessorResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[50] + mi := &file_api_v1_api_proto_msgTypes[55] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2954,7 +3376,7 @@ func (x *UpdateProcessorResponse) String() string { func (*UpdateProcessorResponse) ProtoMessage() {} func (x *UpdateProcessorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[50] + mi := &file_api_v1_api_proto_msgTypes[55] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2967,41 +3389,127 @@ func (x *UpdateProcessorResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use UpdateProcessorResponse.ProtoReflect.Descriptor instead. 
func (*UpdateProcessorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{50} + return file_api_v1_api_proto_rawDescGZIP(), []int{55} +} + +func (x *UpdateProcessorResponse) GetProcessor() *Processor { + if x != nil { + return x.Processor + } + return nil +} + +type DeleteProcessorRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` +} + +func (x *DeleteProcessorRequest) Reset() { + *x = DeleteProcessorRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_api_proto_msgTypes[56] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteProcessorRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteProcessorRequest) ProtoMessage() {} + +func (x *DeleteProcessorRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[56] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DeleteProcessorRequest.ProtoReflect.Descriptor instead. 
+func (*DeleteProcessorRequest) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{56} +} + +func (x *DeleteProcessorRequest) GetId() string { + if x != nil { + return x.Id + } + return "" +} + +type DeleteProcessorResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *DeleteProcessorResponse) Reset() { + *x = DeleteProcessorResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_api_v1_api_proto_msgTypes[57] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DeleteProcessorResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DeleteProcessorResponse) ProtoMessage() {} + +func (x *DeleteProcessorResponse) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[57] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) } -func (x *UpdateProcessorResponse) GetProcessor() *Processor { - if x != nil { - return x.Processor - } - return nil +// Deprecated: Use DeleteProcessorResponse.ProtoReflect.Descriptor instead. +func (*DeleteProcessorResponse) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{57} } -type DeleteProcessorRequest struct { +type ListProcessorPluginsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + // Regex to filter plugins by name. 
+ Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` } -func (x *DeleteProcessorRequest) Reset() { - *x = DeleteProcessorRequest{} +func (x *ListProcessorPluginsRequest) Reset() { + *x = ListProcessorPluginsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[51] + mi := &file_api_v1_api_proto_msgTypes[58] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } -func (x *DeleteProcessorRequest) String() string { +func (x *ListProcessorPluginsRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*DeleteProcessorRequest) ProtoMessage() {} +func (*ListProcessorPluginsRequest) ProtoMessage() {} -func (x *DeleteProcessorRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[51] +func (x *ListProcessorPluginsRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[58] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3012,41 +3520,43 @@ func (x *DeleteProcessorRequest) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use DeleteProcessorRequest.ProtoReflect.Descriptor instead. -func (*DeleteProcessorRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{51} +// Deprecated: Use ListProcessorPluginsRequest.ProtoReflect.Descriptor instead. 
+func (*ListProcessorPluginsRequest) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{58} } -func (x *DeleteProcessorRequest) GetId() string { +func (x *ListProcessorPluginsRequest) GetName() string { if x != nil { - return x.Id + return x.Name } return "" } -type DeleteProcessorResponse struct { +type ListProcessorPluginsResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields + + Plugins []*ProcessorPluginSpecifications `protobuf:"bytes,1,rep,name=plugins,proto3" json:"plugins,omitempty"` } -func (x *DeleteProcessorResponse) Reset() { - *x = DeleteProcessorResponse{} +func (x *ListProcessorPluginsResponse) Reset() { + *x = ListProcessorPluginsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[52] + mi := &file_api_v1_api_proto_msgTypes[59] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } } -func (x *DeleteProcessorResponse) String() string { +func (x *ListProcessorPluginsResponse) String() string { return protoimpl.X.MessageStringOf(x) } -func (*DeleteProcessorResponse) ProtoMessage() {} +func (*ListProcessorPluginsResponse) ProtoMessage() {} -func (x *DeleteProcessorResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[52] +func (x *ListProcessorPluginsResponse) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_api_proto_msgTypes[59] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3057,9 +3567,16 @@ func (x *DeleteProcessorResponse) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use DeleteProcessorResponse.ProtoReflect.Descriptor instead. -func (*DeleteProcessorResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{52} +// Deprecated: Use ListProcessorPluginsResponse.ProtoReflect.Descriptor instead. 
+func (*ListProcessorPluginsResponse) Descriptor() ([]byte, []int) { + return file_api_v1_api_proto_rawDescGZIP(), []int{59} +} + +func (x *ListProcessorPluginsResponse) GetPlugins() []*ProcessorPluginSpecifications { + if x != nil { + return x.Plugins + } + return nil } type GetInfoRequest struct { @@ -3071,7 +3588,7 @@ type GetInfoRequest struct { func (x *GetInfoRequest) Reset() { *x = GetInfoRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[53] + mi := &file_api_v1_api_proto_msgTypes[60] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3084,7 +3601,7 @@ func (x *GetInfoRequest) String() string { func (*GetInfoRequest) ProtoMessage() {} func (x *GetInfoRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[53] + mi := &file_api_v1_api_proto_msgTypes[60] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3097,7 +3614,7 @@ func (x *GetInfoRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetInfoRequest.ProtoReflect.Descriptor instead. 
func (*GetInfoRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{53} + return file_api_v1_api_proto_rawDescGZIP(), []int{60} } type GetInfoResponse struct { @@ -3111,7 +3628,7 @@ type GetInfoResponse struct { func (x *GetInfoResponse) Reset() { *x = GetInfoResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[54] + mi := &file_api_v1_api_proto_msgTypes[61] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3124,7 +3641,7 @@ func (x *GetInfoResponse) String() string { func (*GetInfoResponse) ProtoMessage() {} func (x *GetInfoResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[54] + mi := &file_api_v1_api_proto_msgTypes[61] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3137,7 +3654,7 @@ func (x *GetInfoResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetInfoResponse.ProtoReflect.Descriptor instead. 
func (*GetInfoResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{54} + return file_api_v1_api_proto_rawDescGZIP(), []int{61} } func (x *GetInfoResponse) GetInfo() *Info { @@ -3160,7 +3677,7 @@ type Info struct { func (x *Info) Reset() { *x = Info{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[55] + mi := &file_api_v1_api_proto_msgTypes[62] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3173,7 +3690,7 @@ func (x *Info) String() string { func (*Info) ProtoMessage() {} func (x *Info) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[55] + mi := &file_api_v1_api_proto_msgTypes[62] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3186,7 +3703,7 @@ func (x *Info) ProtoReflect() protoreflect.Message { // Deprecated: Use Info.ProtoReflect.Descriptor instead. func (*Info) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{55} + return file_api_v1_api_proto_rawDescGZIP(), []int{62} } func (x *Info) GetVersion() string { @@ -3210,6 +3727,9 @@ func (x *Info) GetArch() string { return "" } +// Deprecated: use ConnectorService.ListConnectorPlugins instead. +// +// Deprecated: Marked as deprecated in api/v1/api.proto. 
type ListPluginsRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -3222,7 +3742,7 @@ type ListPluginsRequest struct { func (x *ListPluginsRequest) Reset() { *x = ListPluginsRequest{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[56] + mi := &file_api_v1_api_proto_msgTypes[63] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3235,7 +3755,7 @@ func (x *ListPluginsRequest) String() string { func (*ListPluginsRequest) ProtoMessage() {} func (x *ListPluginsRequest) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[56] + mi := &file_api_v1_api_proto_msgTypes[63] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3248,7 +3768,7 @@ func (x *ListPluginsRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ListPluginsRequest.ProtoReflect.Descriptor instead. func (*ListPluginsRequest) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{56} + return file_api_v1_api_proto_rawDescGZIP(), []int{63} } func (x *ListPluginsRequest) GetName() string { @@ -3258,6 +3778,9 @@ func (x *ListPluginsRequest) GetName() string { return "" } +// Deprecated: use ConnectorService.ListConnectorPlugins instead. +// +// Deprecated: Marked as deprecated in api/v1/api.proto. 
type ListPluginsResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -3269,7 +3792,7 @@ type ListPluginsResponse struct { func (x *ListPluginsResponse) Reset() { *x = ListPluginsResponse{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[57] + mi := &file_api_v1_api_proto_msgTypes[64] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3282,7 +3805,7 @@ func (x *ListPluginsResponse) String() string { func (*ListPluginsResponse) ProtoMessage() {} func (x *ListPluginsResponse) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[57] + mi := &file_api_v1_api_proto_msgTypes[64] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3295,7 +3818,7 @@ func (x *ListPluginsResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ListPluginsResponse.ProtoReflect.Descriptor instead. func (*ListPluginsResponse) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{57} + return file_api_v1_api_proto_rawDescGZIP(), []int{64} } func (x *ListPluginsResponse) GetPlugins() []*PluginSpecifications { @@ -3305,101 +3828,6 @@ func (x *ListPluginsResponse) GetPlugins() []*PluginSpecifications { return nil } -type PluginSpecifications struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"` - Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` - Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"` - Author string `protobuf:"bytes,5,opt,name=author,proto3" json:"author,omitempty"` - DestinationParams map[string]*PluginSpecifications_Parameter 
`protobuf:"bytes,6,rep,name=destination_params,json=destinationParams,proto3" json:"destination_params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - SourceParams map[string]*PluginSpecifications_Parameter `protobuf:"bytes,7,rep,name=source_params,json=sourceParams,proto3" json:"source_params,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *PluginSpecifications) Reset() { - *x = PluginSpecifications{} - if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[58] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *PluginSpecifications) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*PluginSpecifications) ProtoMessage() {} - -func (x *PluginSpecifications) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[58] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use PluginSpecifications.ProtoReflect.Descriptor instead. 
-func (*PluginSpecifications) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{58} -} - -func (x *PluginSpecifications) GetName() string { - if x != nil { - return x.Name - } - return "" -} - -func (x *PluginSpecifications) GetSummary() string { - if x != nil { - return x.Summary - } - return "" -} - -func (x *PluginSpecifications) GetDescription() string { - if x != nil { - return x.Description - } - return "" -} - -func (x *PluginSpecifications) GetVersion() string { - if x != nil { - return x.Version - } - return "" -} - -func (x *PluginSpecifications) GetAuthor() string { - if x != nil { - return x.Author - } - return "" -} - -func (x *PluginSpecifications) GetDestinationParams() map[string]*PluginSpecifications_Parameter { - if x != nil { - return x.DestinationParams - } - return nil -} - -func (x *PluginSpecifications) GetSourceParams() map[string]*PluginSpecifications_Parameter { - if x != nil { - return x.SourceParams - } - return nil -} - type Pipeline_State struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -3413,7 +3841,7 @@ type Pipeline_State struct { func (x *Pipeline_State) Reset() { *x = Pipeline_State{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[59] + mi := &file_api_v1_api_proto_msgTypes[65] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3426,7 +3854,7 @@ func (x *Pipeline_State) String() string { func (*Pipeline_State) ProtoMessage() {} func (x *Pipeline_State) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[59] + mi := &file_api_v1_api_proto_msgTypes[65] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3468,7 +3896,7 @@ type Pipeline_Config struct { func (x *Pipeline_Config) Reset() { *x = Pipeline_Config{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[60] + mi := 
&file_api_v1_api_proto_msgTypes[66] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3481,7 +3909,7 @@ func (x *Pipeline_Config) String() string { func (*Pipeline_Config) ProtoMessage() {} func (x *Pipeline_Config) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[60] + mi := &file_api_v1_api_proto_msgTypes[66] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3534,7 +3962,7 @@ type Pipeline_DLQ struct { func (x *Pipeline_DLQ) Reset() { *x = Pipeline_DLQ{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[61] + mi := &file_api_v1_api_proto_msgTypes[67] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3547,7 +3975,7 @@ func (x *Pipeline_DLQ) String() string { func (*Pipeline_DLQ) ProtoMessage() {} func (x *Pipeline_DLQ) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[61] + mi := &file_api_v1_api_proto_msgTypes[67] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3602,7 +4030,7 @@ type Connector_SourceState struct { func (x *Connector_SourceState) Reset() { *x = Connector_SourceState{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[63] + mi := &file_api_v1_api_proto_msgTypes[69] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3615,7 +4043,7 @@ func (x *Connector_SourceState) String() string { func (*Connector_SourceState) ProtoMessage() {} func (x *Connector_SourceState) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[63] + mi := &file_api_v1_api_proto_msgTypes[69] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3649,7 +4077,7 @@ type Connector_DestinationState struct { func (x 
*Connector_DestinationState) Reset() { *x = Connector_DestinationState{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[64] + mi := &file_api_v1_api_proto_msgTypes[70] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3662,7 +4090,7 @@ func (x *Connector_DestinationState) String() string { func (*Connector_DestinationState) ProtoMessage() {} func (x *Connector_DestinationState) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[64] + mi := &file_api_v1_api_proto_msgTypes[70] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3697,7 +4125,7 @@ type Connector_Config struct { func (x *Connector_Config) Reset() { *x = Connector_Config{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[65] + mi := &file_api_v1_api_proto_msgTypes[71] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3710,7 +4138,7 @@ func (x *Connector_Config) String() string { func (*Connector_Config) ProtoMessage() {} func (x *Connector_Config) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[65] + mi := &file_api_v1_api_proto_msgTypes[71] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3752,7 +4180,7 @@ type Processor_Parent struct { func (x *Processor_Parent) Reset() { *x = Processor_Parent{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[68] + mi := &file_api_v1_api_proto_msgTypes[74] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3765,7 +4193,7 @@ func (x *Processor_Parent) String() string { func (*Processor_Parent) ProtoMessage() {} func (x *Processor_Parent) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[68] + mi := &file_api_v1_api_proto_msgTypes[74] if protoimpl.UnsafeEnabled 
&& x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3807,7 +4235,7 @@ type Processor_Config struct { func (x *Processor_Config) Reset() { *x = Processor_Config{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[69] + mi := &file_api_v1_api_proto_msgTypes[75] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3820,7 +4248,7 @@ func (x *Processor_Config) String() string { func (*Processor_Config) ProtoMessage() {} func (x *Processor_Config) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[69] + mi := &file_api_v1_api_proto_msgTypes[75] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3850,6 +4278,9 @@ func (x *Processor_Config) GetWorkers() int32 { return 0 } +// Deprecated: use config.v1.Parameter instead. +// +// Deprecated: Marked as deprecated in api/v1/api.proto. type PluginSpecifications_Parameter struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -3864,7 +4295,7 @@ type PluginSpecifications_Parameter struct { func (x *PluginSpecifications_Parameter) Reset() { *x = PluginSpecifications_Parameter{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[71] + mi := &file_api_v1_api_proto_msgTypes[80] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3877,7 +4308,7 @@ func (x *PluginSpecifications_Parameter) String() string { func (*PluginSpecifications_Parameter) ProtoMessage() {} func (x *PluginSpecifications_Parameter) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[71] + mi := &file_api_v1_api_proto_msgTypes[80] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3890,7 +4321,7 @@ func (x *PluginSpecifications_Parameter) ProtoReflect() protoreflect.Message { // 
Deprecated: Use PluginSpecifications_Parameter.ProtoReflect.Descriptor instead. func (*PluginSpecifications_Parameter) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{58, 0} + return file_api_v1_api_proto_rawDescGZIP(), []int{5, 0} } func (x *PluginSpecifications_Parameter) GetDescription() string { @@ -3921,7 +4352,9 @@ func (x *PluginSpecifications_Parameter) GetValidations() []*PluginSpecification return nil } -// Validation to be made on the parameter. +// Deprecated: use config.v1.Validation instead. +// +// Deprecated: Marked as deprecated in api/v1/api.proto. type PluginSpecifications_Parameter_Validation struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -3936,7 +4369,7 @@ type PluginSpecifications_Parameter_Validation struct { func (x *PluginSpecifications_Parameter_Validation) Reset() { *x = PluginSpecifications_Parameter_Validation{} if protoimpl.UnsafeEnabled { - mi := &file_api_v1_api_proto_msgTypes[74] + mi := &file_api_v1_api_proto_msgTypes[83] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -3949,7 +4382,7 @@ func (x *PluginSpecifications_Parameter_Validation) String() string { func (*PluginSpecifications_Parameter_Validation) ProtoMessage() {} func (x *PluginSpecifications_Parameter_Validation) ProtoReflect() protoreflect.Message { - mi := &file_api_v1_api_proto_msgTypes[74] + mi := &file_api_v1_api_proto_msgTypes[83] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -3962,7 +4395,7 @@ func (x *PluginSpecifications_Parameter_Validation) ProtoReflect() protoreflect. // Deprecated: Use PluginSpecifications_Parameter_Validation.ProtoReflect.Descriptor instead. 
func (*PluginSpecifications_Parameter_Validation) Descriptor() ([]byte, []int) { - return file_api_v1_api_proto_rawDescGZIP(), []int{58, 0, 0} + return file_api_v1_api_proto_rawDescGZIP(), []int{5, 0, 0} } func (x *PluginSpecifications_Parameter_Validation) GetType() PluginSpecifications_Parameter_Validation_Type { @@ -3985,655 +4418,800 @@ var file_api_v1_api_proto_rawDesc = []byte{ 0x0a, 0x10, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x61, 0x70, 0x69, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x1a, 0x18, 0x6f, 0x70, 0x65, 0x6e, 0x63, 0x64, 0x63, 0x2f, 0x76, 0x31, 0x2f, 0x6f, 0x70, 0x65, 0x6e, 0x63, 0x64, 0x63, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, - 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, - 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, - 0x6f, 0x70, 0x65, 0x6e, 0x61, 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xa9, 0x06, 0x0a, 0x08, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, - 0x41, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2c, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 
0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, - 0x74, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x28, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, - 0x03, 0x52, 0x0c, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x49, 0x64, 0x73, 0x12, - 0x28, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x73, - 0x18, 0x06, 0x20, 0x03, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x0c, 0x70, 0x72, 0x6f, - 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x64, 0x73, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, - 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, - 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, - 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, - 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2f, 0x76, 0x31, 0x2f, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, + 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, + 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, + 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 
0x1a, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x63, 0x2d, 0x67, 0x65, 0x6e, 0x2d, 0x6f, 0x70, 0x65, 0x6e, 0x61, + 0x70, 0x69, 0x76, 0x32, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x61, 0x6e, 0x6e, + 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, + 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0xa9, 0x06, 0x0a, 0x08, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x13, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x02, 0x69, + 0x64, 0x12, 0x2c, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, + 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x12, 0x28, 0x0a, 0x0d, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x5f, 0x69, 0x64, + 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x0c, 0x63, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x49, 0x64, 0x73, 0x12, 0x28, 0x0a, 0x0d, 0x70, 0x72, + 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, + 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, + 0x72, 0x49, 0x64, 0x73, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, + 0x61, 0x74, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, - 0x4e, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, - 0x75, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x1a, - 0x3e, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, - 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x1a, - 0xef, 0x01, 0x0a, 0x03, 0x44, 0x4c, 0x51, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, - 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, - 0x3e, 0x0a, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x2e, 0x44, 0x4c, 0x51, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x12, - 0x1f, 0x0a, 0x0b, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x04, 0x52, 0x0a, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x53, 0x69, 0x7a, 0x65, - 0x12, 0x32, 0x0a, 0x15, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x5f, 0x6e, 0x61, 0x63, 0x6b, 0x5f, - 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 
0x04, 0x20, 0x01, 0x28, 0x04, 0x52, - 0x13, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x4e, 0x61, 0x63, 0x6b, 0x54, 0x68, 0x72, 0x65, 0x73, - 0x68, 0x6f, 0x6c, 0x64, 0x1a, 0x3b, 0x0a, 0x0d, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, - 0x01, 0x22, 0x5d, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x53, - 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, - 0x44, 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x52, 0x55, - 0x4e, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, - 0x53, 0x5f, 0x53, 0x54, 0x4f, 0x50, 0x50, 0x45, 0x44, 0x10, 0x02, 0x12, 0x13, 0x0a, 0x0f, 0x53, - 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x44, 0x45, 0x47, 0x52, 0x41, 0x44, 0x45, 0x44, 0x10, 0x03, - 0x22, 0xba, 0x07, 0x0a, 0x09, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x13, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, - 0x02, 0x69, 0x64, 0x12, 0x51, 0x0a, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x2e, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, - 0x74, 0x65, 0x48, 0x00, 0x52, 0x10, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x42, 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x5f, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x61, - 
0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, - 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x30, 0x0a, 0x06, 0x63, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2f, 0x0a, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x54, 0x79, - 0x70, 0x65, 0x42, 0x03, 0xe0, 0x41, 0x05, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, - 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, - 0x41, 0x05, 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x24, 0x0a, 0x0b, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x42, - 0x03, 0xe0, 0x41, 0x05, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, - 0x12, 0x28, 0x0a, 0x0d, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x5f, 0x69, 0x64, - 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x0c, 0x70, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x64, 0x73, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, - 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, - 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, - 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, - 0x5f, 0x61, 0x74, 0x18, 0x0a, 0x20, 
0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, + 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x08, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, + 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x4e, 0x0a, 0x05, 0x53, 0x74, + 0x61, 0x74, 0x65, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, + 0x61, 0x74, 0x75, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x1a, 0x3e, 0x0a, 0x06, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, + 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xef, 0x01, 0x0a, 0x03, 0x44, + 0x4c, 0x51, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x3e, 0x0a, 0x08, 0x73, 0x65, + 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x44, + 0x4c, 0x51, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x52, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x1f, 
0x0a, 0x0b, 0x77, 0x69, + 0x6e, 0x64, 0x6f, 0x77, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, + 0x0a, 0x77, 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x77, + 0x69, 0x6e, 0x64, 0x6f, 0x77, 0x5f, 0x6e, 0x61, 0x63, 0x6b, 0x5f, 0x74, 0x68, 0x72, 0x65, 0x73, + 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x13, 0x77, 0x69, 0x6e, 0x64, + 0x6f, 0x77, 0x4e, 0x61, 0x63, 0x6b, 0x54, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x1a, + 0x3b, 0x0a, 0x0d, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x5d, 0x0a, 0x06, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x16, 0x0a, 0x12, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, + 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x12, + 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x52, 0x55, 0x4e, 0x4e, 0x49, 0x4e, 0x47, + 0x10, 0x01, 0x12, 0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, 0x5f, 0x53, 0x54, 0x4f, + 0x50, 0x50, 0x45, 0x44, 0x10, 0x02, 0x12, 0x13, 0x0a, 0x0f, 0x53, 0x54, 0x41, 0x54, 0x55, 0x53, + 0x5f, 0x44, 0x45, 0x47, 0x52, 0x41, 0x44, 0x45, 0x44, 0x10, 0x03, 0x22, 0xba, 0x07, 0x0a, 0x09, + 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x51, + 0x0a, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x74, + 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x44, 0x65, 0x73, + 0x74, 0x69, 
0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x48, 0x00, 0x52, + 0x10, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, + 0x65, 0x12, 0x42, 0x0a, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, + 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x30, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, + 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, + 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x2f, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, + 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x42, 0x03, 0xe0, + 0x41, 0x05, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, + 0x69, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x05, 0x52, 0x06, 0x70, + 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x24, 0x0a, 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x5f, 0x69, 0x64, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x05, 0x52, + 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x28, 0x0a, 0x0d, 0x70, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x08, 0x20, 0x03, + 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x0c, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, + 0x6f, 0x72, 0x49, 0x64, 0x73, 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, + 0x5f, 0x61, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 
0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, - 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, - 0x1a, 0x29, 0x0a, 0x0b, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, - 0x1a, 0x0a, 0x08, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x08, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xa1, 0x01, 0x0a, 0x10, - 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, - 0x12, 0x4f, 0x0a, 0x09, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x2e, 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x09, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x1a, 0x3c, 0x0a, 0x0e, 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, - 0x9d, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x42, - 0x0a, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x26, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x53, 0x65, 0x74, 0x74, 
0x69, - 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, - 0x67, 0x73, 0x1a, 0x3b, 0x0a, 0x0d, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, - 0x43, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, 0x45, 0x5f, - 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0f, 0x0a, - 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x10, 0x01, 0x12, 0x14, - 0x0a, 0x10, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x44, 0x45, 0x53, 0x54, 0x49, 0x4e, 0x41, 0x54, 0x49, - 0x4f, 0x4e, 0x10, 0x02, 0x42, 0x07, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0xfc, 0x04, - 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, 0x13, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x02, 0x69, 0x64, - 0x12, 0x30, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, - 0x73, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x12, 0x17, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, - 0x42, 0x03, 0xe0, 0x41, 0x05, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x35, 0x0a, 0x06, 0x70, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x50, - 0x61, 0x72, 0x65, 0x6e, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x05, 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, - 0x6e, 0x74, 0x12, 0x1c, 0x0a, 
0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, - 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, + 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, - 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x90, 0x01, 0x0a, 0x06, 0x50, 0x61, 0x72, 0x65, 0x6e, - 0x74, 0x12, 0x31, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, - 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, - 0x6f, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x02, 0x69, 0x64, 0x22, 0x43, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, - 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, - 0x10, 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4e, 0x4e, 0x45, - 0x43, 0x54, 0x4f, 0x52, 0x10, 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, - 0x49, 0x50, 0x45, 0x4c, 0x49, 0x4e, 0x45, 0x10, 0x02, 0x1a, 0xa3, 0x01, 
0x0a, 0x06, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x12, 0x42, 0x0a, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, - 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x77, 0x6f, 0x72, 0x6b, - 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x77, 0x6f, 0x72, 0x6b, 0x65, - 0x72, 0x73, 0x1a, 0x3b, 0x0a, 0x0d, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x4a, - 0x04, 0x08, 0x02, 0x10, 0x03, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x2a, 0x0a, 0x14, - 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x47, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x2e, 0x0a, 0x09, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x09, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x73, 0x22, 0x48, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, - 0x6e, 0x66, 
0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x46, 0x0a, 0x16, 0x43, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, - 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x22, 0x24, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x43, 0x0a, 0x13, 0x47, 0x65, 0x74, - 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x58, - 0x0a, 0x15, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, + 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x29, 0x0a, 0x0b, 0x53, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x70, 0x6f, + 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xa1, 0x01, 0x0a, 0x10, 0x44, 0x65, 0x73, 0x74, 0x69, + 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x4f, 0x0a, 0x09, 0x70, + 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 
0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x2e, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x74, 0x61, + 0x74, 0x65, 0x2e, 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x09, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3c, 0x0a, 0x0e, + 0x50, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, + 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, + 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x9d, 0x01, 0x0a, 0x06, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x42, 0x0a, 0x08, 0x73, 0x65, 0x74, + 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, + 0x74, 0x72, 0x79, 0x52, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x1a, 0x3b, 0x0a, + 0x0d, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, + 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, + 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x43, 0x0a, 0x04, 0x54, 0x79, + 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, + 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, 
0x50, 0x45, + 0x5f, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x10, 0x01, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, + 0x45, 0x5f, 0x44, 0x45, 0x53, 0x54, 0x49, 0x4e, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x02, 0x42, + 0x07, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x22, 0x8c, 0x05, 0x0a, 0x09, 0x50, 0x72, 0x6f, + 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x03, 0x52, 0x02, 0x69, 0x64, 0x12, 0x30, 0x0a, 0x06, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1c, 0x0a, + 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1b, 0x0a, 0x06, 0x70, + 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x05, + 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x35, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x65, + 0x6e, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x05, 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x12, + 0x39, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, + 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x39, 0x0a, 0x0a, 0x75, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, + 0x2e, 0x67, 0x6f, 0x6f, 
0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, + 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x75, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x64, 0x41, 0x74, 0x1a, 0x90, 0x01, 0x0a, 0x06, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, + 0x12, 0x31, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1d, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, + 0x72, 0x2e, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, + 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x02, 0x69, 0x64, 0x22, 0x43, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, + 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, + 0x00, 0x12, 0x12, 0x0a, 0x0e, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4e, 0x4e, 0x45, 0x43, + 0x54, 0x4f, 0x52, 0x10, 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x50, 0x49, + 0x50, 0x45, 0x4c, 0x49, 0x4e, 0x45, 0x10, 0x02, 0x1a, 0xa3, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x12, 0x42, 0x0a, 0x08, 0x73, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, + 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x73, + 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x18, 0x0a, 0x07, 0x77, 0x6f, 0x72, 0x6b, 0x65, + 0x72, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, + 0x73, 0x1a, 0x3b, 0x0a, 0x0d, 0x53, 0x65, 0x74, 0x74, 0x69, 0x6e, 0x67, 0x73, 0x45, 0x6e, 0x74, + 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 
0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x4a, 0x04, + 0x08, 0x02, 0x10, 0x03, 0x4a, 0x04, 0x08, 0x04, 0x10, 0x05, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x9f, 0x04, 0x0a, 0x1d, 0x43, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, + 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, + 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, + 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, + 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x18, 0x05, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x12, 0x6b, 0x0a, 0x12, 0x64, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, + 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, + 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x44, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x5c, 0x0a, 0x0d, 0x73, 0x6f, 0x75, 0x72, + 
0x63, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x37, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x1a, 0x5a, 0x0a, 0x16, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x14, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x1a, 0x55, 0x0a, 0x11, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xcd, 0x02, 0x0a, 0x1d, 0x50, 0x72, + 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, + 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 
0x6d, 0x61, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, + 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, + 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x76, + 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x18, + 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x12, 0x55, 0x0a, + 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, + 0x0b, 0x32, 0x35, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, + 0x73, 0x73, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x73, 0x1a, 0x53, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2a, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xa1, 0x09, 0x0a, 0x14, 0x50, 0x6c, + 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 
0x6d, 0x6d, 0x61, 0x72, + 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, + 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, + 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x75, + 0x74, 0x68, 0x6f, 0x72, 0x12, 0x62, 0x0a, 0x12, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x33, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, + 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x44, + 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x53, 0x0a, 0x0d, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x2e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, + 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x53, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, + 0x0c, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x1a, 0xf2, 0x04, + 0x0a, 0x09, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x20, 0x0a, 0x0b, 0x64, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0b, 
0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, + 0x07, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x3f, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, + 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2e, 0x54, 0x79, + 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x53, 0x0a, 0x0b, 0x76, 0x61, 0x6c, 0x69, + 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, + 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x52, 0x0b, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x8b, 0x02, + 0x0a, 0x0a, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x4a, 0x0a, 0x04, + 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x36, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x54, 0x79, + 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x96, + 0x01, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, 0x45, 0x5f, + 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 
0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x11, 0x0a, + 0x0d, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x52, 0x45, 0x51, 0x55, 0x49, 0x52, 0x45, 0x44, 0x10, 0x01, + 0x12, 0x15, 0x0a, 0x11, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, + 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x02, 0x12, 0x12, 0x0a, 0x0e, 0x54, 0x59, 0x50, 0x45, 0x5f, + 0x4c, 0x45, 0x53, 0x53, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x03, 0x12, 0x12, 0x0a, 0x0e, 0x54, + 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x43, 0x4c, 0x55, 0x53, 0x49, 0x4f, 0x4e, 0x10, 0x04, 0x12, + 0x12, 0x0a, 0x0e, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x58, 0x43, 0x4c, 0x55, 0x53, 0x49, 0x4f, + 0x4e, 0x10, 0x05, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x52, 0x45, 0x47, 0x45, + 0x58, 0x10, 0x06, 0x1a, 0x02, 0x18, 0x01, 0x3a, 0x02, 0x18, 0x01, 0x22, 0x80, 0x01, 0x0a, 0x04, + 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, + 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x54, 0x59, + 0x50, 0x45, 0x5f, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x54, + 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, + 0x45, 0x5f, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x59, 0x50, + 0x45, 0x5f, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x04, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, + 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, 0x05, 0x12, 0x11, 0x0a, 0x0d, 0x54, 0x59, 0x50, 0x45, 0x5f, + 0x44, 0x55, 0x52, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x06, 0x1a, 0x02, 0x18, 0x01, 0x3a, 0x02, + 0x18, 0x01, 0x1a, 0x6c, 0x0a, 0x16, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3c, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 
0x26, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, + 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, + 0x1a, 0x67, 0x0a, 0x11, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, + 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x3a, 0x02, 0x18, 0x01, 0x22, 0x2a, 0x0a, + 0x14, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x47, 0x0a, 0x15, 0x4c, 0x69, 0x73, + 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x09, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, + 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x09, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x73, 0x22, 0x48, 0x0a, 0x15, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x31, 
0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x46, 0x0a, 0x16, + 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x24, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x43, 0x0a, 0x13, 0x47, 0x65, + 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, + 0x58, 0x0a, 0x15, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x46, 0x0a, 0x16, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 
0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, + 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x22, 0x27, 0x0a, 0x15, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x18, 0x0a, 0x16, 0x44, 0x65, + 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x26, 0x0a, 0x14, 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x17, 0x0a, 0x15, + 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x3b, 0x0a, 0x13, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, + 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, + 0x66, 0x6f, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x72, + 0x63, 0x65, 0x22, 0x16, 0x0a, 0x14, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1f, 0x0a, 0x0d, 0x47, 0x65, + 0x74, 0x44, 0x4c, 0x51, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x38, 0x0a, 0x0e, 0x47, + 0x65, 0x74, 0x44, 0x4c, 0x51, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, + 0x03, 0x64, 0x6c, 0x71, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, + 
0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x44, 0x4c, 0x51, + 0x52, 0x03, 0x64, 0x6c, 0x71, 0x22, 0x4a, 0x0a, 0x10, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, + 0x4c, 0x51, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x26, 0x0a, 0x03, 0x64, 0x6c, 0x71, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, + 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x44, 0x4c, 0x51, 0x52, 0x03, 0x64, 0x6c, + 0x71, 0x22, 0x3b, 0x0a, 0x11, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x4c, 0x51, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x03, 0x64, 0x6c, 0x71, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x44, 0x4c, 0x51, 0x52, 0x03, 0x64, 0x6c, 0x71, 0x22, 0x27, + 0x0a, 0x15, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x2f, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, - 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x46, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x22, 0x27, 0x0a, 0x15, 0x44, 0x65, 0x6c, 
0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, - 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x18, 0x0a, 0x16, 0x44, 0x65, 0x6c, - 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x26, 0x0a, 0x14, 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x17, 0x0a, 0x15, 0x53, - 0x74, 0x61, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x3b, 0x0a, 0x13, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x66, - 0x6f, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x05, 0x66, 0x6f, 0x72, 0x63, - 0x65, 0x22, 0x16, 0x0a, 0x14, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1f, 0x0a, 0x0d, 0x47, 0x65, 0x74, - 0x44, 0x4c, 0x51, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x38, 0x0a, 0x0e, 0x47, 0x65, - 0x74, 0x44, 0x4c, 0x51, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x03, - 0x64, 0x6c, 0x71, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x44, 0x4c, 0x51, 0x52, - 0x03, 0x64, 0x6c, 0x71, 0x22, 0x4a, 0x0a, 0x10, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x4c, - 0x51, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 
0x64, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x26, 0x0a, 0x03, 0x64, 0x6c, 0x71, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x44, 0x4c, 0x51, 0x52, 0x03, 0x64, 0x6c, 0x71, - 0x22, 0x3b, 0x0a, 0x11, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x4c, 0x51, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x03, 0x64, 0x6c, 0x71, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x2e, 0x44, 0x4c, 0x51, 0x52, 0x03, 0x64, 0x6c, 0x71, 0x22, 0x27, 0x0a, - 0x15, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x46, 0x0a, 0x16, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, + 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x46, 0x0a, 0x16, 0x45, 0x78, 0x70, 0x6f, 0x72, + 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, + 0x45, 0x0a, 0x15, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, + 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x46, 0x0a, 0x16, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 
0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x45, - 0x0a, 0x15, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x46, 0x0a, 0x16, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x2c, 0x0a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x10, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0xaf, 0x01, - 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2a, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, - 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 
0x65, 0x49, 0x64, 0x12, 0x30, 0x0a, + 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0xaf, + 0x01, 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2a, 0x0a, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, + 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x1f, 0x0a, + 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x12, 0x30, + 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x22, 0x4a, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x63, + 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x90, 0x01, 0x0a, + 0x18, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2a, 0x0a, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, + 
0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x30, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, - 0x4a, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x63, 0x6f, - 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x90, 0x01, 0x0a, 0x18, - 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2a, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x12, 0x30, 0x0a, 0x06, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x2e, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x1b, - 0x0a, 0x19, 0x56, 0x61, 0x6c, 0x69, 0x64, 
0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x38, 0x0a, 0x15, 0x4c, - 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x49, 0x64, 0x22, 0x4b, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x31, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x73, 0x22, 0x29, 0x0a, 0x17, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x43, 0x6f, 0x6e, + 0x1b, 0x0a, 0x19, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x38, 0x0a, 0x15, + 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x49, 0x64, 0x22, 0x4b, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x31, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x6e, 
0x65, 0x63, 0x74, + 0x6f, 0x72, 0x73, 0x22, 0x29, 0x0a, 0x17, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, + 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x46, + 0x0a, 0x18, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x72, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, + 0x6e, 0x63, 0x64, 0x63, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x06, + 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x22, 0x25, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, - 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x46, 0x0a, - 0x18, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x72, 0x65, 0x63, - 0x6f, 0x72, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, - 0x63, 0x64, 0x63, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x06, 0x72, - 0x65, 0x63, 0x6f, 0x72, 0x64, 0x22, 0x25, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, + 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x47, 0x0a, + 0x14, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x2e, 0x43, 
0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x09, 0x63, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x5a, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, + 0x12, 0x30, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x22, 0x4a, 0x0a, 0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, + 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x28, + 0x0a, 0x16, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x19, 0x0a, 0x17, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x31, 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x5f, 0x0a, 0x1c, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x50, 0x6c, 
0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3f, 0x0a, 0x07, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, + 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, + 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, + 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, + 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x22, 0x36, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x50, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x1d, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x09, 0x52, 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x73, 0x22, + 0x4b, 0x0a, 0x16, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, + 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31, 0x0a, 0x0a, 0x70, 0x72, 0x6f, + 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, + 0x52, 0x0a, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x22, 0x2b, 0x0a, 0x19, + 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, + 0x49, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x48, 0x0a, 0x1a, 0x49, 0x6e, 0x73, + 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x6e, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x63, 0x64, + 0x63, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x06, 0x72, 0x65, 
0x63, + 0x6f, 0x72, 0x64, 0x22, 0x2c, 0x0a, 0x1a, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, + 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x4f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, + 0x64, 0x22, 0x49, 0x0a, 0x1b, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, + 0x65, 0x73, 0x73, 0x6f, 0x72, 0x4f, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x12, 0x2a, 0x0a, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x63, 0x64, 0x63, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x65, + 0x63, 0x6f, 0x72, 0x64, 0x52, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x22, 0xca, 0x01, 0x0a, + 0x16, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x02, 0x18, 0x01, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, + 0x30, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, + 0x6f, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, + 0x74, 0x12, 0x30, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, + 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x06, 0x70, 0x6c, 
0x75, 0x67, 0x69, 0x6e, 0x22, 0x4a, 0x0a, 0x17, 0x43, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, + 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x63, + 0x65, 0x73, 0x73, 0x6f, 0x72, 0x22, 0x25, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, + 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x47, 0x0a, 0x14, - 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, + 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, - 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x5a, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, - 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x63, + 0x65, 0x73, 0x73, 0x6f, 0x72, 0x22, 0x5a, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x30, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 
0x28, 0x0b, 0x32, - 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x22, 0x4a, 0x0a, 0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, - 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x52, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x28, 0x0a, - 0x16, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x67, 0x22, 0x4a, 0x0a, 0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, + 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, + 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, + 0x6f, 0x72, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x22, 0x28, 0x0a, + 0x16, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x19, 0x0a, 0x17, 0x44, 0x65, 0x6c, 0x65, 0x74, - 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0x36, 0x0a, 0x15, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, - 0x73, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x70, - 0x61, 0x72, 0x65, 0x6e, 
0x74, 0x5f, 0x69, 0x64, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, - 0x09, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x73, 0x22, 0x4b, 0x0a, 0x16, 0x4c, 0x69, - 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x31, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, - 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x0a, 0x70, 0x72, 0x6f, - 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x22, 0x2b, 0x0a, 0x19, 0x49, 0x6e, 0x73, 0x70, 0x65, - 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x6e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x02, 0x69, 0x64, 0x22, 0x48, 0x0a, 0x1a, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, - 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, 0x65, 0x6e, 0x63, 0x64, 0x63, 0x2e, 0x76, 0x31, 0x2e, - 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x22, 0x2c, - 0x0a, 0x1a, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, - 0x6f, 0x72, 0x4f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, - 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x49, 0x0a, 0x1b, - 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, - 0x4f, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2a, 0x0a, 0x06, 0x72, - 0x65, 0x63, 0x6f, 0x72, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6f, 0x70, - 0x65, 0x6e, 0x63, 0x64, 0x63, 0x2e, 0x76, 0x31, 0x2e, 0x52, 
0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, - 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x22, 0xae, 0x01, 0x0a, 0x16, 0x43, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x30, 0x0a, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, - 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x65, 0x6e, 0x74, - 0x52, 0x06, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x12, 0x30, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, - 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1c, 0x0a, 0x09, 0x63, 0x6f, - 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x63, - 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x4a, 0x0a, 0x17, 0x43, 0x72, 0x65, 0x61, - 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, - 0x73, 0x73, 0x6f, 0x72, 0x22, 0x25, 0x0a, 0x13, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, - 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x47, 0x0a, 0x14, 0x47, - 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, - 
0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, - 0x73, 0x73, 0x6f, 0x72, 0x22, 0x5a, 0x0a, 0x16, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, - 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x30, - 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, - 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x22, 0x4a, 0x0a, 0x17, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, - 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x09, 0x70, - 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, - 0x72, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x22, 0x28, 0x0a, 0x16, - 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x22, 0x19, 0x0a, 0x17, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, - 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x10, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x22, 0x33, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 
0x12, 0x20, 0x0a, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, - 0x66, 0x6f, 0x52, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x22, 0x44, 0x0a, 0x04, 0x49, 0x6e, 0x66, 0x6f, - 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x6f, 0x73, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x6f, 0x73, 0x12, 0x12, 0x0a, 0x04, 0x61, 0x72, - 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x61, 0x72, 0x63, 0x68, 0x22, 0x28, - 0x0a, 0x12, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x4d, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, - 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x36, 0x0a, 0x07, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, - 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, - 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x22, 0x8c, 0x09, 0x0a, 0x14, 0x50, 0x6c, 0x75, 0x67, - 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x20, - 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 
0x74, 0x69, 0x6f, 0x6e, - 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x61, 0x75, - 0x74, 0x68, 0x6f, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, - 0x6f, 0x72, 0x12, 0x62, 0x0a, 0x12, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, - 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x44, 0x65, 0x73, - 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x52, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x12, 0x53, 0x0a, 0x0d, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, - 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x53, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0c, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x1a, 0xe1, 0x04, 0x0a, 0x09, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x64, - 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x64, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x12, 0x3f, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, - 0x01, 0x28, 
0x0e, 0x32, 0x2b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, - 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x2e, 0x54, 0x79, 0x70, 0x65, - 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x53, 0x0a, 0x0b, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, - 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0b, - 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x83, 0x02, 0x0a, 0x0a, - 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x4a, 0x0a, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x36, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x54, 0x79, 0x70, 0x65, - 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x92, 0x01, 0x0a, - 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, - 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x54, - 0x59, 0x50, 0x45, 0x5f, 0x52, 0x45, 0x51, 0x55, 0x49, 0x52, 0x45, 0x44, 0x10, 0x01, 0x12, 0x15, - 0x0a, 0x11, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x47, 0x52, 0x45, 0x41, 0x54, 0x45, 0x52, 0x5f, 0x54, - 0x48, 0x41, 0x4e, 0x10, 0x02, 0x12, 0x12, 0x0a, 
0x0e, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x4c, 0x45, - 0x53, 0x53, 0x5f, 0x54, 0x48, 0x41, 0x4e, 0x10, 0x03, 0x12, 0x12, 0x0a, 0x0e, 0x54, 0x59, 0x50, - 0x45, 0x5f, 0x49, 0x4e, 0x43, 0x4c, 0x55, 0x53, 0x49, 0x4f, 0x4e, 0x10, 0x04, 0x12, 0x12, 0x0a, - 0x0e, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x45, 0x58, 0x43, 0x4c, 0x55, 0x53, 0x49, 0x4f, 0x4e, 0x10, - 0x05, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x52, 0x45, 0x47, 0x45, 0x58, 0x10, - 0x06, 0x22, 0x7c, 0x0a, 0x04, 0x54, 0x79, 0x70, 0x65, 0x12, 0x14, 0x0a, 0x10, 0x54, 0x59, 0x50, - 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, - 0x0f, 0x0a, 0x0b, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x01, - 0x12, 0x0c, 0x0a, 0x08, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x49, 0x4e, 0x54, 0x10, 0x02, 0x12, 0x0e, - 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x10, 0x03, 0x12, 0x0d, - 0x0a, 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x04, 0x12, 0x0d, 0x0a, - 0x09, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, 0x05, 0x12, 0x11, 0x0a, 0x0d, - 0x54, 0x59, 0x50, 0x45, 0x5f, 0x44, 0x55, 0x52, 0x41, 0x54, 0x49, 0x4f, 0x4e, 0x10, 0x06, 0x1a, - 0x6c, 0x0a, 0x16, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3c, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x67, 0x0a, - 0x11, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x73, 0x45, 
0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x3c, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, - 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x73, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x32, 0xe6, 0x15, 0x0a, 0x0f, 0x50, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x6e, 0x0a, 0x0d, 0x4c, 0x69, - 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x1c, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0x31, 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x5f, 0x0a, 0x1c, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, + 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3f, 0x0a, 0x07, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, + 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, + 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, + 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x70, + 0x6c, 0x75, 0x67, 0x69, 
0x6e, 0x73, 0x22, 0x10, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, + 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x33, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x49, + 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x20, 0x0a, 0x04, 0x69, + 0x6e, 0x66, 0x6f, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x22, 0x44, 0x0a, + 0x04, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x0e, 0x0a, 0x02, 0x6f, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x6f, 0x73, 0x12, + 0x12, 0x0a, 0x04, 0x61, 0x72, 0x63, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x61, + 0x72, 0x63, 0x68, 0x22, 0x2c, 0x0a, 0x12, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x6c, 0x75, 0x67, 0x69, + 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x3a, 0x02, 0x18, + 0x01, 0x22, 0x51, 0x0a, 0x13, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x36, 0x0a, 0x07, 0x70, 0x6c, 0x75, 0x67, + 0x69, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x31, 0x2e, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x70, 0x65, 0x63, 0x69, 0x66, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x07, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, + 0x3a, 0x02, 0x18, 0x01, 0x32, 0xe6, 0x15, 0x0a, 0x0f, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x6e, 0x0a, 0x0d, 0x4c, 0x69, 0x73, 0x74, + 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 
0x6c, 0x69, 0x6e, 0x65, 0x73, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x20, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1a, - 0x62, 0x09, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x0d, 0x2f, 0x76, 0x31, - 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0xe7, 0x02, 0x0a, 0x0e, 0x43, - 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x95, 0x02, 0x92, - 0x41, 0xef, 0x01, 0x4a, 0x77, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, - 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x42, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, - 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, - 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x74, 0x0a, 0x03, - 0x34, 0x30, 0x39, 0x12, 0x6d, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x53, 0x0a, - 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, - 0x6e, 0x12, 0x3f, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x36, 0x2c, 0x20, - 0x22, 
0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x61, 0x6c, 0x72, 0x65, - 0x61, 0x64, 0x79, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, - 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, - 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1c, 0x3a, 0x01, 0x2a, 0x62, 0x08, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x0d, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, - 0x69, 0x6e, 0x65, 0x73, 0x12, 0xea, 0x01, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, - 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa1, 0x01, - 0x92, 0x41, 0x7a, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, - 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, - 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, - 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x1e, 0x62, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x12, 0x2f, - 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, - 0x7d, 0x12, 0xe6, 0x03, 0x0a, 0x0e, 0x55, 
0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x22, 0x94, 0x03, 0x92, 0x41, 0xe9, 0x02, 0x4a, 0x77, 0x0a, 0x03, 0x34, 0x30, - 0x30, 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x61, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, + 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x20, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1a, 0x62, 0x09, + 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0x0d, 0x2f, 0x76, 0x31, 0x2f, 0x70, + 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x12, 0xe7, 0x02, 0x0a, 0x0e, 0x43, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x95, 0x02, 0x92, 0x41, 0xef, + 0x01, 0x4a, 0x77, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 
0x74, 0x69, 0x6f, + 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x42, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, + 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, + 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, + 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x74, 0x0a, 0x03, 0x34, 0x30, + 0x39, 0x12, 0x6d, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x53, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, - 0x42, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, - 0x64, 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, - 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, - 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, - 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, - 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x74, 0x0a, - 0x03, 0x34, 0x30, 
0x39, 0x12, 0x6d, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x53, - 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, - 0x6f, 0x6e, 0x12, 0x3f, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x36, 0x2c, - 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x61, 0x6c, 0x72, - 0x65, 0x61, 0x64, 0x79, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, - 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x21, 0x3a, 0x01, 0x2a, 0x62, 0x08, 0x70, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x1a, 0x12, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0xe5, 0x02, 0x0a, 0x0e, 0x44, - 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, - 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x93, 0x02, 0x92, - 0x41, 0xf5, 0x01, 0x4a, 0x79, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x72, 0x12, 0x16, 0x0a, 0x14, - 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x44, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, - 0x65, 0x22, 0x3a, 0x20, 0x39, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, - 0x3a, 0x20, 0x22, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 
0x20, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6e, - 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, - 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, - 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, - 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, - 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, - 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, - 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, - 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x2a, 0x12, - 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, - 0x64, 0x7d, 0x12, 0xe8, 0x02, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, - 0x61, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x72, - 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x99, 0x02, 0x92, 0x41, 0xf5, 0x01, 0x4a, 0x79, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, - 0x72, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, - 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, - 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x44, 
0x7b, - 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x39, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x70, - 0x72, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, + 0x3f, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x36, 0x2c, 0x20, 0x22, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, 0x64, + 0x79, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, + 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, + 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1c, 0x3a, 0x01, 0x2a, 0x62, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x22, 0x0d, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, + 0x65, 0x73, 0x12, 0xea, 0x01, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x12, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, + 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa1, 0x01, 0x92, 0x41, + 0x7a, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, + 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, + 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, + 0x75, 0x6e, 0x64, 0x20, 0x65, 
0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, + 0x1e, 0x62, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x12, 0x2f, 0x76, 0x31, + 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, + 0xe6, 0x03, 0x0a, 0x0e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x12, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0x94, 0x03, 0x92, 0x41, 0xe9, 0x02, 0x4a, 0x77, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, + 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, + 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x61, 0x70, 0x70, + 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x42, 0x7b, + 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x20, + 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, + 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 
0x63, 0x6f, 0x64, 0x65, 0x22, + 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, + 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, + 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x74, 0x0a, 0x03, 0x34, + 0x30, 0x39, 0x12, 0x6d, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x53, 0x0a, 0x10, + 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, + 0x12, 0x3f, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x36, 0x2c, 0x20, 0x22, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x61, 0x6c, 0x72, 0x65, 0x61, + 0x64, 0x79, 0x20, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, + 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x21, 0x3a, 0x01, 0x2a, 0x62, 0x08, 0x70, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x1a, 0x12, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0xe5, 0x02, 0x0a, 0x0e, 0x44, 0x65, 0x6c, + 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x93, 0x02, 0x92, 0x41, 0xf5, + 0x01, 0x4a, 0x79, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x72, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, + 0x2e, 
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x22, 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x44, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, + 0x3a, 0x20, 0x39, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, + 0x22, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x69, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, + 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, + 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, + 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, + 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, + 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x14, 0x2a, 0x12, 0x2f, 0x76, + 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, + 0x12, 0xe8, 0x02, 0x0a, 0x0d, 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x12, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x72, + 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x50, + 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x99, 0x02, 0x92, 0x41, 0xf5, 0x01, 0x4a, 0x79, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x72, 0x12, + 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, + 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, + 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x44, 0x7b, 0x20, 0x22, + 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x39, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, + 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x70, 0x72, 0x65, + 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, + 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, + 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, + 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, + 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, + 0x1a, 0x22, 0x18, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, + 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0xe7, 0x02, 0x0a, 0x0c, + 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1b, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 
0x65, 0x6c, 0x69, + 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x31, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x9b, 0x02, 0x92, 0x41, 0xf5, 0x01, 0x4a, 0x79, + 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x72, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, + 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, + 0x73, 0x6f, 0x6e, 0x12, 0x44, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x39, + 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x66, 0x61, + 0x69, 0x6c, 0x65, 0x64, 0x20, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, + 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, + 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, + 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, + 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, + 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, - 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, - 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 
0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, - 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, - 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x1a, 0x22, 0x18, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0xe7, 0x02, - 0x0a, 0x0c, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1b, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 0x65, - 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x74, 0x6f, 0x70, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, - 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x9b, 0x02, 0x92, 0x41, 0xf5, 0x01, - 0x4a, 0x79, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x72, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, + 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1c, 0x3a, 0x01, 0x2a, 0x22, 0x17, 0x2f, 0x76, + 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, + 0x2f, 0x73, 0x74, 0x6f, 0x70, 0x12, 0x6a, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x44, 0x4c, 0x51, 0x12, + 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x4c, 0x51, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, + 0x47, 0x65, 0x74, 0x44, 0x4c, 0x51, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x31, + 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2b, 0x62, 0x03, 0x64, 
0x6c, 0x71, 0x12, 0x24, 0x2f, 0x76, 0x31, + 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, + 0x64, 0x65, 0x61, 0x64, 0x2d, 0x6c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x2d, 0x71, 0x75, 0x65, 0x75, + 0x65, 0x12, 0x78, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x4c, 0x51, 0x12, 0x18, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x4c, + 0x51, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x4c, 0x51, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x36, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, 0x3a, 0x03, 0x64, 0x6c, 0x71, + 0x62, 0x03, 0x64, 0x6c, 0x71, 0x1a, 0x24, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x64, 0x65, 0x61, 0x64, 0x2d, 0x6c, + 0x65, 0x74, 0x74, 0x65, 0x72, 0x2d, 0x71, 0x75, 0x65, 0x75, 0x65, 0x12, 0x7c, 0x0a, 0x0e, 0x45, + 0x78, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2b, 0x82, 0xd3, + 0xe4, 0x93, 0x02, 0x25, 0x62, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x19, + 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, + 0x64, 0x7d, 0x2f, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x81, 0x01, 0x0a, 0x0e, 0x49, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 
0x65, + 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, + 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x30, 0x82, 0xd3, 0xe4, + 0x93, 0x02, 0x2a, 0x3a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x62, 0x08, 0x70, + 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x14, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, + 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x32, 0x8e, 0x10, + 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x53, 0x65, 0x72, 0x76, 0x69, + 0x63, 0x65, 0x12, 0x73, 0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, + 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, + 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x22, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1c, 0x62, 0x0a, 0x63, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x0e, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x84, 0x01, 0x0a, 0x10, 0x49, 0x6e, 0x73, 0x70, + 0x65, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1f, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x43, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, + 0x2b, 0x82, 0xd3, 0xe4, 0x93, 
0x02, 0x25, 0x62, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, + 0x1b, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, + 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x69, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x30, 0x01, 0x12, 0xef, + 0x01, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, + 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, + 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa3, 0x01, 0x92, 0x41, 0x7a, + 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x22, 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x44, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, - 0x20, 0x39, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, - 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, - 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, - 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, + 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, + 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 
0x22, 0x3a, 0x20, 0x22, + 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, + 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x20, + 0x62, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x13, 0x2f, 0x76, 0x31, + 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, + 0x12, 0xf5, 0x01, 0x0a, 0x0f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa0, 0x01, 0x92, 0x41, 0x79, 0x4a, 0x77, 0x0a, 0x03, 0x34, + 0x30, 0x30, 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, - 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, - 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1c, 0x3a, 0x01, 0x2a, 0x22, 0x17, - 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, - 0x64, 0x7d, 
0x2f, 0x73, 0x74, 0x6f, 0x70, 0x12, 0x6a, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x44, 0x4c, - 0x51, 0x12, 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x4c, - 0x51, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x31, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x4c, 0x51, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0x31, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x2b, 0x62, 0x03, 0x64, 0x6c, 0x71, 0x12, 0x24, 0x2f, - 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, - 0x7d, 0x2f, 0x64, 0x65, 0x61, 0x64, 0x2d, 0x6c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x2d, 0x71, 0x75, - 0x65, 0x75, 0x65, 0x12, 0x78, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x4c, 0x51, - 0x12, 0x18, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x44, 0x4c, 0x51, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x61, 0x70, 0x69, - 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x44, 0x4c, 0x51, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x36, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x30, 0x3a, 0x03, 0x64, - 0x6c, 0x71, 0x62, 0x03, 0x64, 0x6c, 0x71, 0x1a, 0x24, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x64, 0x65, 0x61, 0x64, - 0x2d, 0x6c, 0x65, 0x74, 0x74, 0x65, 0x72, 0x2d, 0x71, 0x75, 0x65, 0x75, 0x65, 0x12, 0x7c, 0x0a, - 0x0e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, - 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x50, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x45, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2b, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x25, 0x62, 0x08, 
0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, - 0x22, 0x19, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, - 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x65, 0x78, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x81, 0x01, 0x0a, 0x0e, - 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x12, 0x1d, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, - 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, - 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x50, 0x69, 0x70, - 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x30, 0x82, - 0xd3, 0xe4, 0x93, 0x02, 0x2a, 0x3a, 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x62, - 0x08, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x22, 0x14, 0x2f, 0x76, 0x31, 0x2f, 0x70, - 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x32, - 0x81, 0x0f, 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x53, 0x65, 0x72, - 0x76, 0x69, 0x63, 0x65, 0x12, 0x73, 0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, - 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x22, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1c, 0x62, 0x0a, 0x63, - 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x0e, 0x2f, 0x76, 0x31, 0x2f, 0x63, - 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0x84, 0x01, 0x0a, 0x10, 0x49, 0x6e, - 0x73, 0x70, 0x65, 0x63, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 
0x12, 0x1f, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x43, - 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, - 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x2b, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x25, 0x62, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, - 0x64, 0x12, 0x1b, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x69, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x30, 0x01, - 0x12, 0xef, 0x01, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, - 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, - 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa3, 0x01, 0x92, - 0x41, 0x7a, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, + 0x12, 0x42, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, + 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x3a, 0x01, 0x2a, 0x62, 0x09, 0x63, + 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x0e, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0xa2, 0x03, 0x0a, 0x11, 0x56, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 
0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x20, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, + 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, + 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0xc7, 0x02, 0x92, 0x41, 0xa1, 0x02, 0x4a, 0x9d, 0x01, 0x0a, 0x03, 0x34, + 0x30, 0x30, 0x12, 0x95, 0x01, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x7b, 0x0a, + 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, + 0x6e, 0x12, 0x67, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x39, 0x2c, 0x20, + 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x76, 0x61, 0x6c, 0x69, + 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, 0x60, 0x61, + 0x77, 0x73, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x60, 0x20, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x6d, 0x75, 0x73, + 0x74, 0x20, 0x62, 0x65, 0x20, 0x73, 0x65, 0x74, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, + 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x7f, 0x0a, 0x03, 0x35, 0x30, + 0x30, 0x12, 0x78, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x5e, 0x0a, 0x10, 0x61, + 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, + 0x4a, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x31, 0x33, 0x2c, 0x20, 0x22, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 
0x22, 0x63, 0x6f, 0x75, 0x6c, 0x64, + 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x64, 0x69, 0x73, 0x70, 0x65, 0x6e, 0x73, 0x65, 0x20, 0x64, 0x65, + 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, + 0x1c, 0x3a, 0x01, 0x2a, 0x22, 0x17, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x12, 0xf5, 0x02, + 0x0a, 0x0f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x22, 0xa0, 0x02, 0x92, 0x41, 0xf3, 0x01, 0x4a, 0x77, 0x0a, 0x03, 0x34, 0x30, 0x30, + 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x61, 0x70, + 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x42, + 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, + 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 
0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, @@ -4641,87 +5219,72 @@ var file_api_v1_api_proto_rawDesc = []byte{ 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, - 0x02, 0x20, 0x62, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x13, 0x2f, - 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, - 0x64, 0x7d, 0x12, 0xf5, 0x01, 0x0a, 0x0f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa0, 0x01, 0x92, 0x41, 0x79, 0x4a, 0x77, 0x0a, - 0x03, 0x34, 0x30, 0x30, 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x56, - 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, - 0x6f, 0x6e, 0x12, 0x42, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, - 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, - 0x61, 0x6c, 0x69, 0x64, 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 
0x73, 0x22, - 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x3a, 0x01, 0x2a, 0x62, - 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x22, 0x0e, 0x2f, 0x76, 0x31, 0x2f, - 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x12, 0xa2, 0x03, 0x0a, 0x11, 0x56, - 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, - 0x12, 0x20, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, 0x64, 0x61, - 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x61, 0x6c, 0x69, - 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xc7, 0x02, 0x92, 0x41, 0xa1, 0x02, 0x4a, 0x9d, 0x01, 0x0a, - 0x03, 0x34, 0x30, 0x30, 0x12, 0x95, 0x01, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, - 0x7b, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, - 0x73, 0x6f, 0x6e, 0x12, 0x67, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x39, - 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x76, 0x61, - 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x3a, 0x20, - 0x60, 0x61, 0x77, 0x73, 0x2e, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x4b, 0x65, 0x79, 0x49, 0x64, - 0x60, 0x20, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x20, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x20, 0x6d, - 0x75, 0x73, 0x74, 0x20, 0x62, 0x65, 0x20, 0x73, 0x65, 0x74, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, - 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x7f, 0x0a, 0x03, - 0x35, 0x30, 0x30, 0x12, 0x78, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x72, 
0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x5e, 0x0a, + 0x02, 0x23, 0x3a, 0x01, 0x2a, 0x62, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x1a, 0x13, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, + 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0xe9, 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, + 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, + 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x94, 0x02, 0x92, 0x41, 0xf5, + 0x01, 0x4a, 0x79, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x72, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, + 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, + 0x75, 0x73, 0x22, 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x44, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, + 0x3a, 0x20, 0x39, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, + 0x22, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x69, + 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, + 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, + 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, - 0x6e, 0x12, 0x4a, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 
0x3a, 0x20, 0x31, 0x33, 0x2c, - 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x63, 0x6f, 0x75, - 0x6c, 0x64, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x64, 0x69, 0x73, 0x70, 0x65, 0x6e, 0x73, 0x65, 0x20, - 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x2c, 0x20, 0x22, 0x64, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x1c, 0x3a, 0x01, 0x2a, 0x22, 0x17, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x12, - 0xf5, 0x02, 0x0a, 0x0f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa0, 0x02, 0x92, 0x41, 0xf3, 0x01, 0x4a, 0x77, 0x0a, 0x03, 0x34, - 0x30, 0x30, 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, - 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, - 0x12, 0x42, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, - 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, - 0x69, 0x64, 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, - 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, - 0x14, 
0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, - 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, - 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, - 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, - 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, - 0xe4, 0x93, 0x02, 0x23, 0x3a, 0x01, 0x2a, 0x62, 0x09, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x1a, 0x13, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, - 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0xe9, 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x6c, 0x65, - 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, - 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x94, 0x02, 0x92, - 0x41, 0xf5, 0x01, 0x4a, 0x79, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x72, 0x12, 0x16, 0x0a, 0x14, - 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x44, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, - 0x65, 0x22, 0x3a, 0x20, 0x39, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, - 0x3a, 0x20, 0x22, 0x66, 0x61, 0x69, 0x6c, 
0x65, 0x64, 0x20, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6e, - 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, - 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, + 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, + 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, + 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x15, 0x2a, 0x13, 0x2f, 0x76, + 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, + 0x7d, 0x12, 0x8a, 0x01, 0x0a, 0x14, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, + 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x43, 0x6f, 0x6e, + 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x27, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x21, 0x62, 0x07, 0x70, + 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x12, 0x16, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, + 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x32, 0x86, + 0x0e, 0x0a, 0x10, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x53, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x12, 0x73, 0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, + 0x73, 0x73, 0x6f, 0x72, 0x73, 0x12, 0x1d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 
0x31, 0x2e, 0x4c, + 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, + 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x22, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1c, 0x62, 0x0a, 0x70, 0x72, + 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x12, 0x0e, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, + 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x12, 0x8d, 0x01, 0x0a, 0x12, 0x49, 0x6e, 0x73, + 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x6e, 0x12, + 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, + 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x73, 0x70, + 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x6e, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2e, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x28, 0x62, 0x06, + 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x1e, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x63, + 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x69, 0x6e, 0x73, 0x70, + 0x65, 0x63, 0x74, 0x2d, 0x69, 0x6e, 0x30, 0x01, 0x12, 0x91, 0x01, 0x0a, 0x13, 0x49, 0x6e, 0x73, + 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x4f, 0x75, 0x74, + 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, + 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x4f, 0x75, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, + 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x4f, 0x75, + 0x74, 0x52, 0x65, 
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2f, 0x82, 0xd3, 0xe4, 0x93, 0x02, + 0x29, 0x62, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x1f, 0x2f, 0x76, 0x31, 0x2f, 0x70, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x2f, 0x69, + 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x2d, 0x6f, 0x75, 0x74, 0x30, 0x01, 0x12, 0xef, 0x01, 0x0a, + 0x0c, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, 0x1b, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa3, 0x01, 0x92, 0x41, 0x7a, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, @@ -4729,153 +5292,121 @@ var file_api_v1_api_proto_rawDesc = []byte{ 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, - 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x15, 0x2a, 0x13, - 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x73, 0x2f, 0x7b, - 0x69, 0x64, 0x7d, 0x32, 0xf9, 0x0c, 0x0a, 0x10, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, - 0x72, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x73, 0x0a, 0x0e, 0x4c, 0x69, 0x73, 0x74, - 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x12, 0x1d, 0x2e, 0x61, 0x70, 0x69, - 
0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, - 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, - 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x22, 0x82, 0xd3, 0xe4, 0x93, 0x02, - 0x1c, 0x62, 0x0a, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x12, 0x0e, 0x2f, - 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x12, 0x8d, 0x01, - 0x0a, 0x12, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, - 0x6f, 0x72, 0x49, 0x6e, 0x12, 0x21, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, - 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x49, 0x6e, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, - 0x2e, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, - 0x72, 0x49, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2e, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x28, 0x62, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x1e, 0x2f, 0x76, 0x31, - 0x2f, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, - 0x2f, 0x69, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x2d, 0x69, 0x6e, 0x30, 0x01, 0x12, 0x91, 0x01, - 0x0a, 0x13, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, - 0x6f, 0x72, 0x4f, 0x75, 0x74, 0x12, 0x22, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x49, - 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x4f, - 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, - 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, - 0x73, 0x6f, 0x72, 0x4f, 0x75, 0x74, 
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2f, - 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x29, 0x62, 0x06, 0x72, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x1f, - 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, - 0x69, 0x64, 0x7d, 0x2f, 0x69, 0x6e, 0x73, 0x70, 0x65, 0x63, 0x74, 0x2d, 0x6f, 0x75, 0x74, 0x30, - 0x01, 0x12, 0xef, 0x01, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, - 0x6f, 0x72, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, - 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x63, - 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa3, 0x01, - 0x92, 0x41, 0x7a, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, - 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, - 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, - 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, - 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, - 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, - 0x93, 0x02, 0x20, 0x62, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, 0x13, - 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, - 0x69, 0x64, 0x7d, 0x12, 0xf5, 0x01, 0x0a, 0x0f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 
0x69, 0x2e, 0x76, 0x31, - 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, - 0x2e, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa0, 0x01, 0x92, 0x41, 0x79, 0x4a, 0x77, - 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, - 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, - 0x56, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, - 0x73, 0x6f, 0x6e, 0x12, 0x42, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, - 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, - 0x76, 0x61, 0x6c, 0x69, 0x64, 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, - 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x3a, 0x01, 0x2a, - 0x62, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x22, 0x0e, 0x2f, 0x76, 0x31, - 0x2f, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x12, 0xf5, 0x02, 0x0a, 0x0f, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, - 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, - 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, - 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x22, 0xa0, 0x02, 0x92, 0x41, 0xf3, 0x01, 0x4a, 0x77, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x70, - 0x12, 0x16, 
0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, - 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x42, 0x7b, 0x20, - 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, - 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x20, 0x61, - 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, - 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, - 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, - 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, - 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, - 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, - 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, - 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, - 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, - 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, - 0x3a, 0x01, 0x2a, 0x62, 0x09, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x1a, 0x13, - 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, - 0x69, 0x64, 0x7d, 0x12, 0xe9, 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x72, - 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, - 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 
0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, - 0x2e, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x94, 0x02, 0x92, 0x41, 0xf5, 0x01, 0x4a, - 0x79, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x72, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, - 0x22, 0x58, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, - 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x44, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, - 0x39, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x66, - 0x61, 0x69, 0x6c, 0x65, 0x64, 0x20, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, - 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, - 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, - 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, - 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, - 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, - 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, - 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x15, 0x2a, 0x13, 0x2f, 0x76, 0x31, 0x2f, - 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x32, - 0x65, 0x0a, 0x12, 0x49, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 
0x53, 0x65, - 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x4f, 0x0a, 0x07, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, - 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, - 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, - 0x31, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x22, 0x13, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x0d, 0x62, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x12, - 0x05, 0x2f, 0x69, 0x6e, 0x66, 0x6f, 0x32, 0x75, 0x0a, 0x0d, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, - 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x64, 0x0a, 0x0b, 0x4c, 0x69, 0x73, 0x74, 0x50, - 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x12, 0x1a, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, - 0x4c, 0x69, 0x73, 0x74, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, - 0x1c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x16, 0x62, 0x07, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, - 0x12, 0x0b, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x42, 0xa2, 0x03, - 0x92, 0x41, 0x9e, 0x02, 0x12, 0xac, 0x01, 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, - 0x20, 0x52, 0x45, 0x53, 0x54, 0x20, 0x41, 0x50, 0x49, 0x22, 0x37, 0x0a, 0x0f, 0x43, 0x6f, 0x6e, - 0x64, 0x75, 0x69, 0x74, 0x20, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x24, 0x68, 0x74, - 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x63, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x6e, 0x64, 0x75, - 0x69, 0x74, 0x2a, 0x57, 0x0a, 0x1a, 0x41, 0x70, 0x61, 0x63, 0x68, 0x65, 0x20, 0x4c, 0x69, 0x63, - 0x65, 0x6e, 0x73, 0x65, 0x20, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x32, 0x2e, 0x30, - 0x12, 0x39, 0x68, 0x74, 
0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, - 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x43, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x49, 0x4f, 0x2f, 0x63, - 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x2f, 0x62, 0x6c, 0x6f, 0x62, 0x2f, 0x6d, 0x61, 0x69, 0x6e, - 0x2f, 0x4c, 0x49, 0x43, 0x45, 0x4e, 0x53, 0x45, 0x2e, 0x6d, 0x64, 0x32, 0x06, 0x76, 0x30, 0x2e, - 0x31, 0x2e, 0x30, 0x52, 0x6d, 0x0a, 0x03, 0x35, 0x30, 0x30, 0x12, 0x66, 0x12, 0x16, 0x0a, 0x14, - 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, - 0x61, 0x74, 0x75, 0x73, 0x22, 0x4c, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x38, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, - 0x65, 0x22, 0x3a, 0x20, 0x31, 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, - 0x22, 0x3a, 0x20, 0x22, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x20, 0x62, 0x09, + 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, 0x13, 0x2f, 0x76, 0x31, 0x2f, 0x70, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0xf5, + 0x01, 0x0a, 0x0f, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, + 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, + 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x72, 0x65, 0x61, + 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0xa0, 0x01, 0x92, 0x41, 0x79, 0x4a, 0x77, 0x0a, 0x03, 0x34, 0x30, 0x30, + 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, + 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 
0x22, 0x56, 0x0a, 0x10, 0x61, 0x70, + 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x42, + 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, - 0x20, 0x7d, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x42, 0x08, - 0x41, 0x70, 0x69, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x69, 0x6f, - 0x2f, 0x63, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, - 0x70, 0x69, 0x2f, 0x76, 0x31, 0x3b, 0x61, 0x70, 0x69, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x41, 0x58, - 0x58, 0xaa, 0x02, 0x06, 0x41, 0x70, 0x69, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x06, 0x41, 0x70, 0x69, - 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x12, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x07, 0x41, 0x70, 0x69, 0x3a, 0x3a, - 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1e, 0x3a, 0x01, 0x2a, 0x62, 0x09, 0x70, 0x72, 0x6f, + 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x22, 0x0e, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x63, + 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x12, 0xf5, 0x02, 0x0a, 0x0f, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x55, 0x70, 
0x64, 0x61, 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, + 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa0, 0x02, 0x92, 0x41, + 0xf3, 0x01, 0x4a, 0x77, 0x0a, 0x03, 0x34, 0x30, 0x30, 0x12, 0x70, 0x12, 0x16, 0x0a, 0x14, 0x1a, + 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, + 0x74, 0x75, 0x73, 0x22, 0x56, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x42, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, + 0x22, 0x3a, 0x20, 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, + 0x20, 0x22, 0x69, 0x6e, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x20, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, + 0x6e, 0x74, 0x73, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, + 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, + 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, + 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, + 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, + 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, + 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, + 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x3a, 0x01, 0x2a, 0x62, 0x09, + 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x1a, 0x13, 0x2f, 0x76, 0x31, 0x2f, 0x70, + 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0xe9, + 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x50, 0x72, 0x6f, 
0x63, 0x65, 0x73, 0x73, + 0x6f, 0x72, 0x12, 0x1e, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x65, 0x6c, 0x65, + 0x74, 0x65, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x94, 0x02, 0x92, 0x41, 0xf5, 0x01, 0x4a, 0x79, 0x0a, 0x03, 0x34, 0x30, + 0x30, 0x12, 0x72, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x58, 0x0a, 0x10, 0x61, + 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, + 0x44, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x39, 0x2c, 0x20, 0x22, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 0x22, 0x66, 0x61, 0x69, 0x6c, 0x65, 0x64, + 0x20, 0x70, 0x72, 0x65, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x20, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, + 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x4a, 0x78, 0x0a, 0x03, 0x34, 0x30, 0x34, 0x12, 0x71, 0x12, 0x16, + 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, + 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, 0x57, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, + 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, 0x73, 0x6f, 0x6e, 0x12, 0x43, 0x7b, 0x20, 0x22, 0x63, + 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x35, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, + 0x65, 0x22, 0x3a, 0x20, 0x22, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x20, 0x6e, 0x6f, + 0x74, 0x20, 0x66, 0x6f, 0x75, 0x6e, 0x64, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, + 0x22, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x82, + 0xd3, 0xe4, 
0x93, 0x02, 0x15, 0x2a, 0x13, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x63, 0x65, + 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, 0x7b, 0x69, 0x64, 0x7d, 0x12, 0x8a, 0x01, 0x0a, 0x14, 0x4c, + 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, + 0x69, 0x6e, 0x73, 0x12, 0x23, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, + 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, + 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x24, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x50, + 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x27, + 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x21, 0x62, 0x07, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x12, + 0x16, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x63, 0x65, 0x73, 0x73, 0x6f, 0x72, 0x73, 0x2f, + 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x32, 0x65, 0x0a, 0x12, 0x49, 0x6e, 0x66, 0x6f, 0x72, + 0x6d, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x4f, 0x0a, + 0x07, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x17, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x47, 0x65, 0x74, 0x49, 0x6e, 0x66, + 0x6f, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x13, 0x82, 0xd3, 0xe4, 0x93, 0x02, + 0x0d, 0x62, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x12, 0x05, 0x2f, 0x69, 0x6e, 0x66, 0x6f, 0x32, 0x78, + 0x0a, 0x0d, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, + 0x67, 0x0a, 0x0b, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x12, 0x1a, + 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x6c, 0x75, 0x67, + 0x69, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 
0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x61, 0x70, 0x69, + 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x50, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x16, 0x62, + 0x07, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x73, 0x12, 0x0b, 0x2f, 0x76, 0x31, 0x2f, 0x70, 0x6c, + 0x75, 0x67, 0x69, 0x6e, 0x73, 0x88, 0x02, 0x01, 0x42, 0xa2, 0x03, 0x92, 0x41, 0x9e, 0x02, 0x12, + 0xac, 0x01, 0x0a, 0x10, 0x43, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x20, 0x52, 0x45, 0x53, 0x54, + 0x20, 0x41, 0x50, 0x49, 0x22, 0x37, 0x0a, 0x0f, 0x43, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x20, + 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x24, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, + 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x6e, 0x64, + 0x75, 0x69, 0x74, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x2a, 0x57, 0x0a, + 0x1a, 0x41, 0x70, 0x61, 0x63, 0x68, 0x65, 0x20, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x20, + 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x20, 0x32, 0x2e, 0x30, 0x12, 0x39, 0x68, 0x74, 0x74, + 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x43, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x49, 0x4f, 0x2f, 0x63, 0x6f, 0x6e, 0x64, 0x75, 0x69, + 0x74, 0x2f, 0x62, 0x6c, 0x6f, 0x62, 0x2f, 0x6d, 0x61, 0x69, 0x6e, 0x2f, 0x4c, 0x49, 0x43, 0x45, + 0x4e, 0x53, 0x45, 0x2e, 0x6d, 0x64, 0x32, 0x06, 0x76, 0x30, 0x2e, 0x31, 0x2e, 0x30, 0x52, 0x6d, + 0x0a, 0x03, 0x35, 0x30, 0x30, 0x12, 0x66, 0x12, 0x16, 0x0a, 0x14, 0x1a, 0x12, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x72, 0x70, 0x63, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x22, + 0x4c, 0x0a, 0x10, 0x61, 0x70, 0x70, 0x6c, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x6a, + 0x73, 0x6f, 0x6e, 0x12, 0x38, 0x7b, 0x20, 0x22, 0x63, 0x6f, 0x64, 0x65, 0x22, 0x3a, 0x20, 0x31, + 0x33, 0x2c, 0x20, 0x22, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x22, 0x3a, 0x20, 
0x22, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x72, 0x20, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x2c, 0x20, 0x22, 0x64, + 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x22, 0x3a, 0x20, 0x5b, 0x5d, 0x20, 0x7d, 0x0a, 0x0a, 0x63, + 0x6f, 0x6d, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x42, 0x08, 0x41, 0x70, 0x69, 0x50, 0x72, + 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x63, 0x6f, 0x6e, 0x64, 0x75, 0x69, 0x74, 0x69, 0x6f, 0x2f, 0x63, 0x6f, 0x6e, 0x64, + 0x75, 0x69, 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, + 0x3b, 0x61, 0x70, 0x69, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x41, 0x58, 0x58, 0xaa, 0x02, 0x06, 0x41, + 0x70, 0x69, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x06, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, 0xe2, 0x02, + 0x12, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, + 0x61, 0x74, 0x61, 0xea, 0x02, 0x07, 0x41, 0x70, 0x69, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -4891,7 +5422,7 @@ func file_api_v1_api_proto_rawDescGZIP() []byte { } var file_api_v1_api_proto_enumTypes = make([]protoimpl.EnumInfo, 5) -var file_api_v1_api_proto_msgTypes = make([]protoimpl.MessageInfo, 75) +var file_api_v1_api_proto_msgTypes = make([]protoimpl.MessageInfo, 84) var file_api_v1_api_proto_goTypes = []interface{}{ (Pipeline_Status)(0), // 0: api.v1.Pipeline.Status (Connector_Type)(0), // 1: api.v1.Connector.Type @@ -4901,201 +5432,223 @@ var file_api_v1_api_proto_goTypes = []interface{}{ (*Pipeline)(nil), // 5: api.v1.Pipeline (*Connector)(nil), // 6: api.v1.Connector (*Processor)(nil), // 7: api.v1.Processor - (*ListPipelinesRequest)(nil), // 8: api.v1.ListPipelinesRequest - (*ListPipelinesResponse)(nil), // 9: api.v1.ListPipelinesResponse - (*CreatePipelineRequest)(nil), // 10: api.v1.CreatePipelineRequest - (*CreatePipelineResponse)(nil), // 11: api.v1.CreatePipelineResponse - (*GetPipelineRequest)(nil), // 12: 
api.v1.GetPipelineRequest - (*GetPipelineResponse)(nil), // 13: api.v1.GetPipelineResponse - (*UpdatePipelineRequest)(nil), // 14: api.v1.UpdatePipelineRequest - (*UpdatePipelineResponse)(nil), // 15: api.v1.UpdatePipelineResponse - (*DeletePipelineRequest)(nil), // 16: api.v1.DeletePipelineRequest - (*DeletePipelineResponse)(nil), // 17: api.v1.DeletePipelineResponse - (*StartPipelineRequest)(nil), // 18: api.v1.StartPipelineRequest - (*StartPipelineResponse)(nil), // 19: api.v1.StartPipelineResponse - (*StopPipelineRequest)(nil), // 20: api.v1.StopPipelineRequest - (*StopPipelineResponse)(nil), // 21: api.v1.StopPipelineResponse - (*GetDLQRequest)(nil), // 22: api.v1.GetDLQRequest - (*GetDLQResponse)(nil), // 23: api.v1.GetDLQResponse - (*UpdateDLQRequest)(nil), // 24: api.v1.UpdateDLQRequest - (*UpdateDLQResponse)(nil), // 25: api.v1.UpdateDLQResponse - (*ExportPipelineRequest)(nil), // 26: api.v1.ExportPipelineRequest - (*ExportPipelineResponse)(nil), // 27: api.v1.ExportPipelineResponse - (*ImportPipelineRequest)(nil), // 28: api.v1.ImportPipelineRequest - (*ImportPipelineResponse)(nil), // 29: api.v1.ImportPipelineResponse - (*CreateConnectorRequest)(nil), // 30: api.v1.CreateConnectorRequest - (*CreateConnectorResponse)(nil), // 31: api.v1.CreateConnectorResponse - (*ValidateConnectorRequest)(nil), // 32: api.v1.ValidateConnectorRequest - (*ValidateConnectorResponse)(nil), // 33: api.v1.ValidateConnectorResponse - (*ListConnectorsRequest)(nil), // 34: api.v1.ListConnectorsRequest - (*ListConnectorsResponse)(nil), // 35: api.v1.ListConnectorsResponse - (*InspectConnectorRequest)(nil), // 36: api.v1.InspectConnectorRequest - (*InspectConnectorResponse)(nil), // 37: api.v1.InspectConnectorResponse - (*GetConnectorRequest)(nil), // 38: api.v1.GetConnectorRequest - (*GetConnectorResponse)(nil), // 39: api.v1.GetConnectorResponse - (*UpdateConnectorRequest)(nil), // 40: api.v1.UpdateConnectorRequest - (*UpdateConnectorResponse)(nil), // 41: 
api.v1.UpdateConnectorResponse - (*DeleteConnectorRequest)(nil), // 42: api.v1.DeleteConnectorRequest - (*DeleteConnectorResponse)(nil), // 43: api.v1.DeleteConnectorResponse - (*ListProcessorsRequest)(nil), // 44: api.v1.ListProcessorsRequest - (*ListProcessorsResponse)(nil), // 45: api.v1.ListProcessorsResponse - (*InspectProcessorInRequest)(nil), // 46: api.v1.InspectProcessorInRequest - (*InspectProcessorInResponse)(nil), // 47: api.v1.InspectProcessorInResponse - (*InspectProcessorOutRequest)(nil), // 48: api.v1.InspectProcessorOutRequest - (*InspectProcessorOutResponse)(nil), // 49: api.v1.InspectProcessorOutResponse - (*CreateProcessorRequest)(nil), // 50: api.v1.CreateProcessorRequest - (*CreateProcessorResponse)(nil), // 51: api.v1.CreateProcessorResponse - (*GetProcessorRequest)(nil), // 52: api.v1.GetProcessorRequest - (*GetProcessorResponse)(nil), // 53: api.v1.GetProcessorResponse - (*UpdateProcessorRequest)(nil), // 54: api.v1.UpdateProcessorRequest - (*UpdateProcessorResponse)(nil), // 55: api.v1.UpdateProcessorResponse - (*DeleteProcessorRequest)(nil), // 56: api.v1.DeleteProcessorRequest - (*DeleteProcessorResponse)(nil), // 57: api.v1.DeleteProcessorResponse - (*GetInfoRequest)(nil), // 58: api.v1.GetInfoRequest - (*GetInfoResponse)(nil), // 59: api.v1.GetInfoResponse - (*Info)(nil), // 60: api.v1.Info - (*ListPluginsRequest)(nil), // 61: api.v1.ListPluginsRequest - (*ListPluginsResponse)(nil), // 62: api.v1.ListPluginsResponse - (*PluginSpecifications)(nil), // 63: api.v1.PluginSpecifications - (*Pipeline_State)(nil), // 64: api.v1.Pipeline.State - (*Pipeline_Config)(nil), // 65: api.v1.Pipeline.Config - (*Pipeline_DLQ)(nil), // 66: api.v1.Pipeline.DLQ - nil, // 67: api.v1.Pipeline.DLQ.SettingsEntry - (*Connector_SourceState)(nil), // 68: api.v1.Connector.SourceState - (*Connector_DestinationState)(nil), // 69: api.v1.Connector.DestinationState - (*Connector_Config)(nil), // 70: api.v1.Connector.Config - nil, // 71: 
api.v1.Connector.DestinationState.PositionsEntry - nil, // 72: api.v1.Connector.Config.SettingsEntry - (*Processor_Parent)(nil), // 73: api.v1.Processor.Parent - (*Processor_Config)(nil), // 74: api.v1.Processor.Config - nil, // 75: api.v1.Processor.Config.SettingsEntry - (*PluginSpecifications_Parameter)(nil), // 76: api.v1.PluginSpecifications.Parameter - nil, // 77: api.v1.PluginSpecifications.DestinationParamsEntry - nil, // 78: api.v1.PluginSpecifications.SourceParamsEntry - (*PluginSpecifications_Parameter_Validation)(nil), // 79: api.v1.PluginSpecifications.Parameter.Validation - (*timestamppb.Timestamp)(nil), // 80: google.protobuf.Timestamp - (*v1.Record)(nil), // 81: opencdc.v1.Record + (*ConnectorPluginSpecifications)(nil), // 8: api.v1.ConnectorPluginSpecifications + (*ProcessorPluginSpecifications)(nil), // 9: api.v1.ProcessorPluginSpecifications + (*PluginSpecifications)(nil), // 10: api.v1.PluginSpecifications + (*ListPipelinesRequest)(nil), // 11: api.v1.ListPipelinesRequest + (*ListPipelinesResponse)(nil), // 12: api.v1.ListPipelinesResponse + (*CreatePipelineRequest)(nil), // 13: api.v1.CreatePipelineRequest + (*CreatePipelineResponse)(nil), // 14: api.v1.CreatePipelineResponse + (*GetPipelineRequest)(nil), // 15: api.v1.GetPipelineRequest + (*GetPipelineResponse)(nil), // 16: api.v1.GetPipelineResponse + (*UpdatePipelineRequest)(nil), // 17: api.v1.UpdatePipelineRequest + (*UpdatePipelineResponse)(nil), // 18: api.v1.UpdatePipelineResponse + (*DeletePipelineRequest)(nil), // 19: api.v1.DeletePipelineRequest + (*DeletePipelineResponse)(nil), // 20: api.v1.DeletePipelineResponse + (*StartPipelineRequest)(nil), // 21: api.v1.StartPipelineRequest + (*StartPipelineResponse)(nil), // 22: api.v1.StartPipelineResponse + (*StopPipelineRequest)(nil), // 23: api.v1.StopPipelineRequest + (*StopPipelineResponse)(nil), // 24: api.v1.StopPipelineResponse + (*GetDLQRequest)(nil), // 25: api.v1.GetDLQRequest + (*GetDLQResponse)(nil), // 26: api.v1.GetDLQResponse 
+ (*UpdateDLQRequest)(nil), // 27: api.v1.UpdateDLQRequest + (*UpdateDLQResponse)(nil), // 28: api.v1.UpdateDLQResponse + (*ExportPipelineRequest)(nil), // 29: api.v1.ExportPipelineRequest + (*ExportPipelineResponse)(nil), // 30: api.v1.ExportPipelineResponse + (*ImportPipelineRequest)(nil), // 31: api.v1.ImportPipelineRequest + (*ImportPipelineResponse)(nil), // 32: api.v1.ImportPipelineResponse + (*CreateConnectorRequest)(nil), // 33: api.v1.CreateConnectorRequest + (*CreateConnectorResponse)(nil), // 34: api.v1.CreateConnectorResponse + (*ValidateConnectorRequest)(nil), // 35: api.v1.ValidateConnectorRequest + (*ValidateConnectorResponse)(nil), // 36: api.v1.ValidateConnectorResponse + (*ListConnectorsRequest)(nil), // 37: api.v1.ListConnectorsRequest + (*ListConnectorsResponse)(nil), // 38: api.v1.ListConnectorsResponse + (*InspectConnectorRequest)(nil), // 39: api.v1.InspectConnectorRequest + (*InspectConnectorResponse)(nil), // 40: api.v1.InspectConnectorResponse + (*GetConnectorRequest)(nil), // 41: api.v1.GetConnectorRequest + (*GetConnectorResponse)(nil), // 42: api.v1.GetConnectorResponse + (*UpdateConnectorRequest)(nil), // 43: api.v1.UpdateConnectorRequest + (*UpdateConnectorResponse)(nil), // 44: api.v1.UpdateConnectorResponse + (*DeleteConnectorRequest)(nil), // 45: api.v1.DeleteConnectorRequest + (*DeleteConnectorResponse)(nil), // 46: api.v1.DeleteConnectorResponse + (*ListConnectorPluginsRequest)(nil), // 47: api.v1.ListConnectorPluginsRequest + (*ListConnectorPluginsResponse)(nil), // 48: api.v1.ListConnectorPluginsResponse + (*ListProcessorsRequest)(nil), // 49: api.v1.ListProcessorsRequest + (*ListProcessorsResponse)(nil), // 50: api.v1.ListProcessorsResponse + (*InspectProcessorInRequest)(nil), // 51: api.v1.InspectProcessorInRequest + (*InspectProcessorInResponse)(nil), // 52: api.v1.InspectProcessorInResponse + (*InspectProcessorOutRequest)(nil), // 53: api.v1.InspectProcessorOutRequest + (*InspectProcessorOutResponse)(nil), // 54: 
api.v1.InspectProcessorOutResponse + (*CreateProcessorRequest)(nil), // 55: api.v1.CreateProcessorRequest + (*CreateProcessorResponse)(nil), // 56: api.v1.CreateProcessorResponse + (*GetProcessorRequest)(nil), // 57: api.v1.GetProcessorRequest + (*GetProcessorResponse)(nil), // 58: api.v1.GetProcessorResponse + (*UpdateProcessorRequest)(nil), // 59: api.v1.UpdateProcessorRequest + (*UpdateProcessorResponse)(nil), // 60: api.v1.UpdateProcessorResponse + (*DeleteProcessorRequest)(nil), // 61: api.v1.DeleteProcessorRequest + (*DeleteProcessorResponse)(nil), // 62: api.v1.DeleteProcessorResponse + (*ListProcessorPluginsRequest)(nil), // 63: api.v1.ListProcessorPluginsRequest + (*ListProcessorPluginsResponse)(nil), // 64: api.v1.ListProcessorPluginsResponse + (*GetInfoRequest)(nil), // 65: api.v1.GetInfoRequest + (*GetInfoResponse)(nil), // 66: api.v1.GetInfoResponse + (*Info)(nil), // 67: api.v1.Info + (*ListPluginsRequest)(nil), // 68: api.v1.ListPluginsRequest + (*ListPluginsResponse)(nil), // 69: api.v1.ListPluginsResponse + (*Pipeline_State)(nil), // 70: api.v1.Pipeline.State + (*Pipeline_Config)(nil), // 71: api.v1.Pipeline.Config + (*Pipeline_DLQ)(nil), // 72: api.v1.Pipeline.DLQ + nil, // 73: api.v1.Pipeline.DLQ.SettingsEntry + (*Connector_SourceState)(nil), // 74: api.v1.Connector.SourceState + (*Connector_DestinationState)(nil), // 75: api.v1.Connector.DestinationState + (*Connector_Config)(nil), // 76: api.v1.Connector.Config + nil, // 77: api.v1.Connector.DestinationState.PositionsEntry + nil, // 78: api.v1.Connector.Config.SettingsEntry + (*Processor_Parent)(nil), // 79: api.v1.Processor.Parent + (*Processor_Config)(nil), // 80: api.v1.Processor.Config + nil, // 81: api.v1.Processor.Config.SettingsEntry + nil, // 82: api.v1.ConnectorPluginSpecifications.DestinationParamsEntry + nil, // 83: api.v1.ConnectorPluginSpecifications.SourceParamsEntry + nil, // 84: api.v1.ProcessorPluginSpecifications.ParametersEntry + (*PluginSpecifications_Parameter)(nil), // 85: 
api.v1.PluginSpecifications.Parameter + nil, // 86: api.v1.PluginSpecifications.DestinationParamsEntry + nil, // 87: api.v1.PluginSpecifications.SourceParamsEntry + (*PluginSpecifications_Parameter_Validation)(nil), // 88: api.v1.PluginSpecifications.Parameter.Validation + (*timestamppb.Timestamp)(nil), // 89: google.protobuf.Timestamp + (*v11.Record)(nil), // 90: opencdc.v1.Record + (*v1.Parameter)(nil), // 91: config.v1.Parameter } var file_api_v1_api_proto_depIdxs = []int32{ - 64, // 0: api.v1.Pipeline.state:type_name -> api.v1.Pipeline.State - 65, // 1: api.v1.Pipeline.config:type_name -> api.v1.Pipeline.Config - 80, // 2: api.v1.Pipeline.created_at:type_name -> google.protobuf.Timestamp - 80, // 3: api.v1.Pipeline.updated_at:type_name -> google.protobuf.Timestamp - 69, // 4: api.v1.Connector.destination_state:type_name -> api.v1.Connector.DestinationState - 68, // 5: api.v1.Connector.source_state:type_name -> api.v1.Connector.SourceState - 70, // 6: api.v1.Connector.config:type_name -> api.v1.Connector.Config + 70, // 0: api.v1.Pipeline.state:type_name -> api.v1.Pipeline.State + 71, // 1: api.v1.Pipeline.config:type_name -> api.v1.Pipeline.Config + 89, // 2: api.v1.Pipeline.created_at:type_name -> google.protobuf.Timestamp + 89, // 3: api.v1.Pipeline.updated_at:type_name -> google.protobuf.Timestamp + 75, // 4: api.v1.Connector.destination_state:type_name -> api.v1.Connector.DestinationState + 74, // 5: api.v1.Connector.source_state:type_name -> api.v1.Connector.SourceState + 76, // 6: api.v1.Connector.config:type_name -> api.v1.Connector.Config 1, // 7: api.v1.Connector.type:type_name -> api.v1.Connector.Type - 80, // 8: api.v1.Connector.created_at:type_name -> google.protobuf.Timestamp - 80, // 9: api.v1.Connector.updated_at:type_name -> google.protobuf.Timestamp - 74, // 10: api.v1.Processor.config:type_name -> api.v1.Processor.Config - 73, // 11: api.v1.Processor.parent:type_name -> api.v1.Processor.Parent - 80, // 12: api.v1.Processor.created_at:type_name 
-> google.protobuf.Timestamp - 80, // 13: api.v1.Processor.updated_at:type_name -> google.protobuf.Timestamp - 5, // 14: api.v1.ListPipelinesResponse.pipelines:type_name -> api.v1.Pipeline - 65, // 15: api.v1.CreatePipelineRequest.config:type_name -> api.v1.Pipeline.Config - 5, // 16: api.v1.CreatePipelineResponse.pipeline:type_name -> api.v1.Pipeline - 5, // 17: api.v1.GetPipelineResponse.pipeline:type_name -> api.v1.Pipeline - 65, // 18: api.v1.UpdatePipelineRequest.config:type_name -> api.v1.Pipeline.Config - 5, // 19: api.v1.UpdatePipelineResponse.pipeline:type_name -> api.v1.Pipeline - 66, // 20: api.v1.GetDLQResponse.dlq:type_name -> api.v1.Pipeline.DLQ - 66, // 21: api.v1.UpdateDLQRequest.dlq:type_name -> api.v1.Pipeline.DLQ - 66, // 22: api.v1.UpdateDLQResponse.dlq:type_name -> api.v1.Pipeline.DLQ - 5, // 23: api.v1.ExportPipelineResponse.pipeline:type_name -> api.v1.Pipeline - 5, // 24: api.v1.ImportPipelineRequest.pipeline:type_name -> api.v1.Pipeline - 5, // 25: api.v1.ImportPipelineResponse.pipeline:type_name -> api.v1.Pipeline - 1, // 26: api.v1.CreateConnectorRequest.type:type_name -> api.v1.Connector.Type - 70, // 27: api.v1.CreateConnectorRequest.config:type_name -> api.v1.Connector.Config - 6, // 28: api.v1.CreateConnectorResponse.connector:type_name -> api.v1.Connector - 1, // 29: api.v1.ValidateConnectorRequest.type:type_name -> api.v1.Connector.Type - 70, // 30: api.v1.ValidateConnectorRequest.config:type_name -> api.v1.Connector.Config - 6, // 31: api.v1.ListConnectorsResponse.connectors:type_name -> api.v1.Connector - 81, // 32: api.v1.InspectConnectorResponse.record:type_name -> opencdc.v1.Record - 6, // 33: api.v1.GetConnectorResponse.connector:type_name -> api.v1.Connector - 70, // 34: api.v1.UpdateConnectorRequest.config:type_name -> api.v1.Connector.Config - 6, // 35: api.v1.UpdateConnectorResponse.connector:type_name -> api.v1.Connector - 7, // 36: api.v1.ListProcessorsResponse.processors:type_name -> api.v1.Processor - 81, // 37: 
api.v1.InspectProcessorInResponse.record:type_name -> opencdc.v1.Record - 81, // 38: api.v1.InspectProcessorOutResponse.record:type_name -> opencdc.v1.Record - 73, // 39: api.v1.CreateProcessorRequest.parent:type_name -> api.v1.Processor.Parent - 74, // 40: api.v1.CreateProcessorRequest.config:type_name -> api.v1.Processor.Config - 7, // 41: api.v1.CreateProcessorResponse.processor:type_name -> api.v1.Processor - 7, // 42: api.v1.GetProcessorResponse.processor:type_name -> api.v1.Processor - 74, // 43: api.v1.UpdateProcessorRequest.config:type_name -> api.v1.Processor.Config - 7, // 44: api.v1.UpdateProcessorResponse.processor:type_name -> api.v1.Processor - 60, // 45: api.v1.GetInfoResponse.info:type_name -> api.v1.Info - 63, // 46: api.v1.ListPluginsResponse.plugins:type_name -> api.v1.PluginSpecifications - 77, // 47: api.v1.PluginSpecifications.destination_params:type_name -> api.v1.PluginSpecifications.DestinationParamsEntry - 78, // 48: api.v1.PluginSpecifications.source_params:type_name -> api.v1.PluginSpecifications.SourceParamsEntry - 0, // 49: api.v1.Pipeline.State.status:type_name -> api.v1.Pipeline.Status - 67, // 50: api.v1.Pipeline.DLQ.settings:type_name -> api.v1.Pipeline.DLQ.SettingsEntry - 71, // 51: api.v1.Connector.DestinationState.positions:type_name -> api.v1.Connector.DestinationState.PositionsEntry - 72, // 52: api.v1.Connector.Config.settings:type_name -> api.v1.Connector.Config.SettingsEntry - 2, // 53: api.v1.Processor.Parent.type:type_name -> api.v1.Processor.Parent.Type - 75, // 54: api.v1.Processor.Config.settings:type_name -> api.v1.Processor.Config.SettingsEntry - 3, // 55: api.v1.PluginSpecifications.Parameter.type:type_name -> api.v1.PluginSpecifications.Parameter.Type - 79, // 56: api.v1.PluginSpecifications.Parameter.validations:type_name -> api.v1.PluginSpecifications.Parameter.Validation - 76, // 57: api.v1.PluginSpecifications.DestinationParamsEntry.value:type_name -> api.v1.PluginSpecifications.Parameter - 76, // 58: 
api.v1.PluginSpecifications.SourceParamsEntry.value:type_name -> api.v1.PluginSpecifications.Parameter - 4, // 59: api.v1.PluginSpecifications.Parameter.Validation.type:type_name -> api.v1.PluginSpecifications.Parameter.Validation.Type - 8, // 60: api.v1.PipelineService.ListPipelines:input_type -> api.v1.ListPipelinesRequest - 10, // 61: api.v1.PipelineService.CreatePipeline:input_type -> api.v1.CreatePipelineRequest - 12, // 62: api.v1.PipelineService.GetPipeline:input_type -> api.v1.GetPipelineRequest - 14, // 63: api.v1.PipelineService.UpdatePipeline:input_type -> api.v1.UpdatePipelineRequest - 16, // 64: api.v1.PipelineService.DeletePipeline:input_type -> api.v1.DeletePipelineRequest - 18, // 65: api.v1.PipelineService.StartPipeline:input_type -> api.v1.StartPipelineRequest - 20, // 66: api.v1.PipelineService.StopPipeline:input_type -> api.v1.StopPipelineRequest - 22, // 67: api.v1.PipelineService.GetDLQ:input_type -> api.v1.GetDLQRequest - 24, // 68: api.v1.PipelineService.UpdateDLQ:input_type -> api.v1.UpdateDLQRequest - 26, // 69: api.v1.PipelineService.ExportPipeline:input_type -> api.v1.ExportPipelineRequest - 28, // 70: api.v1.PipelineService.ImportPipeline:input_type -> api.v1.ImportPipelineRequest - 34, // 71: api.v1.ConnectorService.ListConnectors:input_type -> api.v1.ListConnectorsRequest - 36, // 72: api.v1.ConnectorService.InspectConnector:input_type -> api.v1.InspectConnectorRequest - 38, // 73: api.v1.ConnectorService.GetConnector:input_type -> api.v1.GetConnectorRequest - 30, // 74: api.v1.ConnectorService.CreateConnector:input_type -> api.v1.CreateConnectorRequest - 32, // 75: api.v1.ConnectorService.ValidateConnector:input_type -> api.v1.ValidateConnectorRequest - 40, // 76: api.v1.ConnectorService.UpdateConnector:input_type -> api.v1.UpdateConnectorRequest - 42, // 77: api.v1.ConnectorService.DeleteConnector:input_type -> api.v1.DeleteConnectorRequest - 44, // 78: api.v1.ProcessorService.ListProcessors:input_type -> 
api.v1.ListProcessorsRequest - 46, // 79: api.v1.ProcessorService.InspectProcessorIn:input_type -> api.v1.InspectProcessorInRequest - 48, // 80: api.v1.ProcessorService.InspectProcessorOut:input_type -> api.v1.InspectProcessorOutRequest - 52, // 81: api.v1.ProcessorService.GetProcessor:input_type -> api.v1.GetProcessorRequest - 50, // 82: api.v1.ProcessorService.CreateProcessor:input_type -> api.v1.CreateProcessorRequest - 54, // 83: api.v1.ProcessorService.UpdateProcessor:input_type -> api.v1.UpdateProcessorRequest - 56, // 84: api.v1.ProcessorService.DeleteProcessor:input_type -> api.v1.DeleteProcessorRequest - 58, // 85: api.v1.InformationService.GetInfo:input_type -> api.v1.GetInfoRequest - 61, // 86: api.v1.PluginService.ListPlugins:input_type -> api.v1.ListPluginsRequest - 9, // 87: api.v1.PipelineService.ListPipelines:output_type -> api.v1.ListPipelinesResponse - 11, // 88: api.v1.PipelineService.CreatePipeline:output_type -> api.v1.CreatePipelineResponse - 13, // 89: api.v1.PipelineService.GetPipeline:output_type -> api.v1.GetPipelineResponse - 15, // 90: api.v1.PipelineService.UpdatePipeline:output_type -> api.v1.UpdatePipelineResponse - 17, // 91: api.v1.PipelineService.DeletePipeline:output_type -> api.v1.DeletePipelineResponse - 19, // 92: api.v1.PipelineService.StartPipeline:output_type -> api.v1.StartPipelineResponse - 21, // 93: api.v1.PipelineService.StopPipeline:output_type -> api.v1.StopPipelineResponse - 23, // 94: api.v1.PipelineService.GetDLQ:output_type -> api.v1.GetDLQResponse - 25, // 95: api.v1.PipelineService.UpdateDLQ:output_type -> api.v1.UpdateDLQResponse - 27, // 96: api.v1.PipelineService.ExportPipeline:output_type -> api.v1.ExportPipelineResponse - 29, // 97: api.v1.PipelineService.ImportPipeline:output_type -> api.v1.ImportPipelineResponse - 35, // 98: api.v1.ConnectorService.ListConnectors:output_type -> api.v1.ListConnectorsResponse - 37, // 99: api.v1.ConnectorService.InspectConnector:output_type -> 
api.v1.InspectConnectorResponse - 39, // 100: api.v1.ConnectorService.GetConnector:output_type -> api.v1.GetConnectorResponse - 31, // 101: api.v1.ConnectorService.CreateConnector:output_type -> api.v1.CreateConnectorResponse - 33, // 102: api.v1.ConnectorService.ValidateConnector:output_type -> api.v1.ValidateConnectorResponse - 41, // 103: api.v1.ConnectorService.UpdateConnector:output_type -> api.v1.UpdateConnectorResponse - 43, // 104: api.v1.ConnectorService.DeleteConnector:output_type -> api.v1.DeleteConnectorResponse - 45, // 105: api.v1.ProcessorService.ListProcessors:output_type -> api.v1.ListProcessorsResponse - 47, // 106: api.v1.ProcessorService.InspectProcessorIn:output_type -> api.v1.InspectProcessorInResponse - 49, // 107: api.v1.ProcessorService.InspectProcessorOut:output_type -> api.v1.InspectProcessorOutResponse - 53, // 108: api.v1.ProcessorService.GetProcessor:output_type -> api.v1.GetProcessorResponse - 51, // 109: api.v1.ProcessorService.CreateProcessor:output_type -> api.v1.CreateProcessorResponse - 55, // 110: api.v1.ProcessorService.UpdateProcessor:output_type -> api.v1.UpdateProcessorResponse - 57, // 111: api.v1.ProcessorService.DeleteProcessor:output_type -> api.v1.DeleteProcessorResponse - 59, // 112: api.v1.InformationService.GetInfo:output_type -> api.v1.GetInfoResponse - 62, // 113: api.v1.PluginService.ListPlugins:output_type -> api.v1.ListPluginsResponse - 87, // [87:114] is the sub-list for method output_type - 60, // [60:87] is the sub-list for method input_type - 60, // [60:60] is the sub-list for extension type_name - 60, // [60:60] is the sub-list for extension extendee - 0, // [0:60] is the sub-list for field type_name + 89, // 8: api.v1.Connector.created_at:type_name -> google.protobuf.Timestamp + 89, // 9: api.v1.Connector.updated_at:type_name -> google.protobuf.Timestamp + 80, // 10: api.v1.Processor.config:type_name -> api.v1.Processor.Config + 79, // 11: api.v1.Processor.parent:type_name -> api.v1.Processor.Parent + 89, 
// 12: api.v1.Processor.created_at:type_name -> google.protobuf.Timestamp + 89, // 13: api.v1.Processor.updated_at:type_name -> google.protobuf.Timestamp + 82, // 14: api.v1.ConnectorPluginSpecifications.destination_params:type_name -> api.v1.ConnectorPluginSpecifications.DestinationParamsEntry + 83, // 15: api.v1.ConnectorPluginSpecifications.source_params:type_name -> api.v1.ConnectorPluginSpecifications.SourceParamsEntry + 84, // 16: api.v1.ProcessorPluginSpecifications.parameters:type_name -> api.v1.ProcessorPluginSpecifications.ParametersEntry + 86, // 17: api.v1.PluginSpecifications.destination_params:type_name -> api.v1.PluginSpecifications.DestinationParamsEntry + 87, // 18: api.v1.PluginSpecifications.source_params:type_name -> api.v1.PluginSpecifications.SourceParamsEntry + 5, // 19: api.v1.ListPipelinesResponse.pipelines:type_name -> api.v1.Pipeline + 71, // 20: api.v1.CreatePipelineRequest.config:type_name -> api.v1.Pipeline.Config + 5, // 21: api.v1.CreatePipelineResponse.pipeline:type_name -> api.v1.Pipeline + 5, // 22: api.v1.GetPipelineResponse.pipeline:type_name -> api.v1.Pipeline + 71, // 23: api.v1.UpdatePipelineRequest.config:type_name -> api.v1.Pipeline.Config + 5, // 24: api.v1.UpdatePipelineResponse.pipeline:type_name -> api.v1.Pipeline + 72, // 25: api.v1.GetDLQResponse.dlq:type_name -> api.v1.Pipeline.DLQ + 72, // 26: api.v1.UpdateDLQRequest.dlq:type_name -> api.v1.Pipeline.DLQ + 72, // 27: api.v1.UpdateDLQResponse.dlq:type_name -> api.v1.Pipeline.DLQ + 5, // 28: api.v1.ExportPipelineResponse.pipeline:type_name -> api.v1.Pipeline + 5, // 29: api.v1.ImportPipelineRequest.pipeline:type_name -> api.v1.Pipeline + 5, // 30: api.v1.ImportPipelineResponse.pipeline:type_name -> api.v1.Pipeline + 1, // 31: api.v1.CreateConnectorRequest.type:type_name -> api.v1.Connector.Type + 76, // 32: api.v1.CreateConnectorRequest.config:type_name -> api.v1.Connector.Config + 6, // 33: api.v1.CreateConnectorResponse.connector:type_name -> api.v1.Connector + 1, // 
34: api.v1.ValidateConnectorRequest.type:type_name -> api.v1.Connector.Type + 76, // 35: api.v1.ValidateConnectorRequest.config:type_name -> api.v1.Connector.Config + 6, // 36: api.v1.ListConnectorsResponse.connectors:type_name -> api.v1.Connector + 90, // 37: api.v1.InspectConnectorResponse.record:type_name -> opencdc.v1.Record + 6, // 38: api.v1.GetConnectorResponse.connector:type_name -> api.v1.Connector + 76, // 39: api.v1.UpdateConnectorRequest.config:type_name -> api.v1.Connector.Config + 6, // 40: api.v1.UpdateConnectorResponse.connector:type_name -> api.v1.Connector + 8, // 41: api.v1.ListConnectorPluginsResponse.plugins:type_name -> api.v1.ConnectorPluginSpecifications + 7, // 42: api.v1.ListProcessorsResponse.processors:type_name -> api.v1.Processor + 90, // 43: api.v1.InspectProcessorInResponse.record:type_name -> opencdc.v1.Record + 90, // 44: api.v1.InspectProcessorOutResponse.record:type_name -> opencdc.v1.Record + 79, // 45: api.v1.CreateProcessorRequest.parent:type_name -> api.v1.Processor.Parent + 80, // 46: api.v1.CreateProcessorRequest.config:type_name -> api.v1.Processor.Config + 7, // 47: api.v1.CreateProcessorResponse.processor:type_name -> api.v1.Processor + 7, // 48: api.v1.GetProcessorResponse.processor:type_name -> api.v1.Processor + 80, // 49: api.v1.UpdateProcessorRequest.config:type_name -> api.v1.Processor.Config + 7, // 50: api.v1.UpdateProcessorResponse.processor:type_name -> api.v1.Processor + 9, // 51: api.v1.ListProcessorPluginsResponse.plugins:type_name -> api.v1.ProcessorPluginSpecifications + 67, // 52: api.v1.GetInfoResponse.info:type_name -> api.v1.Info + 10, // 53: api.v1.ListPluginsResponse.plugins:type_name -> api.v1.PluginSpecifications + 0, // 54: api.v1.Pipeline.State.status:type_name -> api.v1.Pipeline.Status + 73, // 55: api.v1.Pipeline.DLQ.settings:type_name -> api.v1.Pipeline.DLQ.SettingsEntry + 77, // 56: api.v1.Connector.DestinationState.positions:type_name -> api.v1.Connector.DestinationState.PositionsEntry + 78, 
// 57: api.v1.Connector.Config.settings:type_name -> api.v1.Connector.Config.SettingsEntry + 2, // 58: api.v1.Processor.Parent.type:type_name -> api.v1.Processor.Parent.Type + 81, // 59: api.v1.Processor.Config.settings:type_name -> api.v1.Processor.Config.SettingsEntry + 91, // 60: api.v1.ConnectorPluginSpecifications.DestinationParamsEntry.value:type_name -> config.v1.Parameter + 91, // 61: api.v1.ConnectorPluginSpecifications.SourceParamsEntry.value:type_name -> config.v1.Parameter + 91, // 62: api.v1.ProcessorPluginSpecifications.ParametersEntry.value:type_name -> config.v1.Parameter + 3, // 63: api.v1.PluginSpecifications.Parameter.type:type_name -> api.v1.PluginSpecifications.Parameter.Type + 88, // 64: api.v1.PluginSpecifications.Parameter.validations:type_name -> api.v1.PluginSpecifications.Parameter.Validation + 85, // 65: api.v1.PluginSpecifications.DestinationParamsEntry.value:type_name -> api.v1.PluginSpecifications.Parameter + 85, // 66: api.v1.PluginSpecifications.SourceParamsEntry.value:type_name -> api.v1.PluginSpecifications.Parameter + 4, // 67: api.v1.PluginSpecifications.Parameter.Validation.type:type_name -> api.v1.PluginSpecifications.Parameter.Validation.Type + 11, // 68: api.v1.PipelineService.ListPipelines:input_type -> api.v1.ListPipelinesRequest + 13, // 69: api.v1.PipelineService.CreatePipeline:input_type -> api.v1.CreatePipelineRequest + 15, // 70: api.v1.PipelineService.GetPipeline:input_type -> api.v1.GetPipelineRequest + 17, // 71: api.v1.PipelineService.UpdatePipeline:input_type -> api.v1.UpdatePipelineRequest + 19, // 72: api.v1.PipelineService.DeletePipeline:input_type -> api.v1.DeletePipelineRequest + 21, // 73: api.v1.PipelineService.StartPipeline:input_type -> api.v1.StartPipelineRequest + 23, // 74: api.v1.PipelineService.StopPipeline:input_type -> api.v1.StopPipelineRequest + 25, // 75: api.v1.PipelineService.GetDLQ:input_type -> api.v1.GetDLQRequest + 27, // 76: api.v1.PipelineService.UpdateDLQ:input_type -> 
api.v1.UpdateDLQRequest + 29, // 77: api.v1.PipelineService.ExportPipeline:input_type -> api.v1.ExportPipelineRequest + 31, // 78: api.v1.PipelineService.ImportPipeline:input_type -> api.v1.ImportPipelineRequest + 37, // 79: api.v1.ConnectorService.ListConnectors:input_type -> api.v1.ListConnectorsRequest + 39, // 80: api.v1.ConnectorService.InspectConnector:input_type -> api.v1.InspectConnectorRequest + 41, // 81: api.v1.ConnectorService.GetConnector:input_type -> api.v1.GetConnectorRequest + 33, // 82: api.v1.ConnectorService.CreateConnector:input_type -> api.v1.CreateConnectorRequest + 35, // 83: api.v1.ConnectorService.ValidateConnector:input_type -> api.v1.ValidateConnectorRequest + 43, // 84: api.v1.ConnectorService.UpdateConnector:input_type -> api.v1.UpdateConnectorRequest + 45, // 85: api.v1.ConnectorService.DeleteConnector:input_type -> api.v1.DeleteConnectorRequest + 47, // 86: api.v1.ConnectorService.ListConnectorPlugins:input_type -> api.v1.ListConnectorPluginsRequest + 49, // 87: api.v1.ProcessorService.ListProcessors:input_type -> api.v1.ListProcessorsRequest + 51, // 88: api.v1.ProcessorService.InspectProcessorIn:input_type -> api.v1.InspectProcessorInRequest + 53, // 89: api.v1.ProcessorService.InspectProcessorOut:input_type -> api.v1.InspectProcessorOutRequest + 57, // 90: api.v1.ProcessorService.GetProcessor:input_type -> api.v1.GetProcessorRequest + 55, // 91: api.v1.ProcessorService.CreateProcessor:input_type -> api.v1.CreateProcessorRequest + 59, // 92: api.v1.ProcessorService.UpdateProcessor:input_type -> api.v1.UpdateProcessorRequest + 61, // 93: api.v1.ProcessorService.DeleteProcessor:input_type -> api.v1.DeleteProcessorRequest + 63, // 94: api.v1.ProcessorService.ListProcessorPlugins:input_type -> api.v1.ListProcessorPluginsRequest + 65, // 95: api.v1.InformationService.GetInfo:input_type -> api.v1.GetInfoRequest + 68, // 96: api.v1.PluginService.ListPlugins:input_type -> api.v1.ListPluginsRequest + 12, // 97: 
api.v1.PipelineService.ListPipelines:output_type -> api.v1.ListPipelinesResponse + 14, // 98: api.v1.PipelineService.CreatePipeline:output_type -> api.v1.CreatePipelineResponse + 16, // 99: api.v1.PipelineService.GetPipeline:output_type -> api.v1.GetPipelineResponse + 18, // 100: api.v1.PipelineService.UpdatePipeline:output_type -> api.v1.UpdatePipelineResponse + 20, // 101: api.v1.PipelineService.DeletePipeline:output_type -> api.v1.DeletePipelineResponse + 22, // 102: api.v1.PipelineService.StartPipeline:output_type -> api.v1.StartPipelineResponse + 24, // 103: api.v1.PipelineService.StopPipeline:output_type -> api.v1.StopPipelineResponse + 26, // 104: api.v1.PipelineService.GetDLQ:output_type -> api.v1.GetDLQResponse + 28, // 105: api.v1.PipelineService.UpdateDLQ:output_type -> api.v1.UpdateDLQResponse + 30, // 106: api.v1.PipelineService.ExportPipeline:output_type -> api.v1.ExportPipelineResponse + 32, // 107: api.v1.PipelineService.ImportPipeline:output_type -> api.v1.ImportPipelineResponse + 38, // 108: api.v1.ConnectorService.ListConnectors:output_type -> api.v1.ListConnectorsResponse + 40, // 109: api.v1.ConnectorService.InspectConnector:output_type -> api.v1.InspectConnectorResponse + 42, // 110: api.v1.ConnectorService.GetConnector:output_type -> api.v1.GetConnectorResponse + 34, // 111: api.v1.ConnectorService.CreateConnector:output_type -> api.v1.CreateConnectorResponse + 36, // 112: api.v1.ConnectorService.ValidateConnector:output_type -> api.v1.ValidateConnectorResponse + 44, // 113: api.v1.ConnectorService.UpdateConnector:output_type -> api.v1.UpdateConnectorResponse + 46, // 114: api.v1.ConnectorService.DeleteConnector:output_type -> api.v1.DeleteConnectorResponse + 48, // 115: api.v1.ConnectorService.ListConnectorPlugins:output_type -> api.v1.ListConnectorPluginsResponse + 50, // 116: api.v1.ProcessorService.ListProcessors:output_type -> api.v1.ListProcessorsResponse + 52, // 117: api.v1.ProcessorService.InspectProcessorIn:output_type -> 
api.v1.InspectProcessorInResponse + 54, // 118: api.v1.ProcessorService.InspectProcessorOut:output_type -> api.v1.InspectProcessorOutResponse + 58, // 119: api.v1.ProcessorService.GetProcessor:output_type -> api.v1.GetProcessorResponse + 56, // 120: api.v1.ProcessorService.CreateProcessor:output_type -> api.v1.CreateProcessorResponse + 60, // 121: api.v1.ProcessorService.UpdateProcessor:output_type -> api.v1.UpdateProcessorResponse + 62, // 122: api.v1.ProcessorService.DeleteProcessor:output_type -> api.v1.DeleteProcessorResponse + 64, // 123: api.v1.ProcessorService.ListProcessorPlugins:output_type -> api.v1.ListProcessorPluginsResponse + 66, // 124: api.v1.InformationService.GetInfo:output_type -> api.v1.GetInfoResponse + 69, // 125: api.v1.PluginService.ListPlugins:output_type -> api.v1.ListPluginsResponse + 97, // [97:126] is the sub-list for method output_type + 68, // [68:97] is the sub-list for method input_type + 68, // [68:68] is the sub-list for extension type_name + 68, // [68:68] is the sub-list for extension extendee + 0, // [0:68] is the sub-list for field type_name } func init() { file_api_v1_api_proto_init() } @@ -5141,7 +5694,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelinesRequest); i { + switch v := v.(*ConnectorPluginSpecifications); i { case 0: return &v.state case 1: @@ -5153,7 +5706,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPipelinesResponse); i { + switch v := v.(*ProcessorPluginSpecifications); i { case 0: return &v.state case 1: @@ -5165,7 +5718,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreatePipelineRequest); i { + switch v := v.(*PluginSpecifications); i { case 0: return &v.state case 1: @@ -5177,7 +5730,7 @@ 
func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreatePipelineResponse); i { + switch v := v.(*ListPipelinesRequest); i { case 0: return &v.state case 1: @@ -5189,7 +5742,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineRequest); i { + switch v := v.(*ListPipelinesResponse); i { case 0: return &v.state case 1: @@ -5201,7 +5754,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPipelineResponse); i { + switch v := v.(*CreatePipelineRequest); i { case 0: return &v.state case 1: @@ -5213,7 +5766,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdatePipelineRequest); i { + switch v := v.(*CreatePipelineResponse); i { case 0: return &v.state case 1: @@ -5225,7 +5778,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdatePipelineResponse); i { + switch v := v.(*GetPipelineRequest); i { case 0: return &v.state case 1: @@ -5237,7 +5790,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeletePipelineRequest); i { + switch v := v.(*GetPipelineResponse); i { case 0: return &v.state case 1: @@ -5249,7 +5802,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeletePipelineResponse); i { + switch v := v.(*UpdatePipelineRequest); i { case 0: return &v.state case 1: @@ -5261,7 +5814,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[13].Exporter = func(v 
interface{}, i int) interface{} { - switch v := v.(*StartPipelineRequest); i { + switch v := v.(*UpdatePipelineResponse); i { case 0: return &v.state case 1: @@ -5273,7 +5826,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StartPipelineResponse); i { + switch v := v.(*DeletePipelineRequest); i { case 0: return &v.state case 1: @@ -5285,7 +5838,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StopPipelineRequest); i { + switch v := v.(*DeletePipelineResponse); i { case 0: return &v.state case 1: @@ -5297,7 +5850,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*StopPipelineResponse); i { + switch v := v.(*StartPipelineRequest); i { case 0: return &v.state case 1: @@ -5309,7 +5862,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetDLQRequest); i { + switch v := v.(*StartPipelineResponse); i { case 0: return &v.state case 1: @@ -5321,7 +5874,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetDLQResponse); i { + switch v := v.(*StopPipelineRequest); i { case 0: return &v.state case 1: @@ -5333,7 +5886,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateDLQRequest); i { + switch v := v.(*StopPipelineResponse); i { case 0: return &v.state case 1: @@ -5345,7 +5898,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateDLQResponse); i { + switch v := v.(*GetDLQRequest); i { 
case 0: return &v.state case 1: @@ -5357,7 +5910,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ExportPipelineRequest); i { + switch v := v.(*GetDLQResponse); i { case 0: return &v.state case 1: @@ -5369,7 +5922,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ExportPipelineResponse); i { + switch v := v.(*UpdateDLQRequest); i { case 0: return &v.state case 1: @@ -5381,7 +5934,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ImportPipelineRequest); i { + switch v := v.(*UpdateDLQResponse); i { case 0: return &v.state case 1: @@ -5393,7 +5946,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ImportPipelineResponse); i { + switch v := v.(*ExportPipelineRequest); i { case 0: return &v.state case 1: @@ -5405,7 +5958,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateConnectorRequest); i { + switch v := v.(*ExportPipelineResponse); i { case 0: return &v.state case 1: @@ -5417,7 +5970,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateConnectorResponse); i { + switch v := v.(*ImportPipelineRequest); i { case 0: return &v.state case 1: @@ -5429,7 +5982,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ValidateConnectorRequest); i { + switch v := v.(*ImportPipelineResponse); i { case 0: return &v.state case 1: @@ -5441,7 +5994,7 @@ func file_api_v1_api_proto_init() { } } 
file_api_v1_api_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ValidateConnectorResponse); i { + switch v := v.(*CreateConnectorRequest); i { case 0: return &v.state case 1: @@ -5453,7 +6006,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListConnectorsRequest); i { + switch v := v.(*CreateConnectorResponse); i { case 0: return &v.state case 1: @@ -5465,7 +6018,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListConnectorsResponse); i { + switch v := v.(*ValidateConnectorRequest); i { case 0: return &v.state case 1: @@ -5477,7 +6030,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*InspectConnectorRequest); i { + switch v := v.(*ValidateConnectorResponse); i { case 0: return &v.state case 1: @@ -5489,7 +6042,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*InspectConnectorResponse); i { + switch v := v.(*ListConnectorsRequest); i { case 0: return &v.state case 1: @@ -5501,7 +6054,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[33].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetConnectorRequest); i { + switch v := v.(*ListConnectorsResponse); i { case 0: return &v.state case 1: @@ -5513,7 +6066,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[34].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetConnectorResponse); i { + switch v := v.(*InspectConnectorRequest); i { case 0: return &v.state case 1: @@ -5525,7 +6078,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[35].Exporter = func(v interface{}, i 
int) interface{} { - switch v := v.(*UpdateConnectorRequest); i { + switch v := v.(*InspectConnectorResponse); i { case 0: return &v.state case 1: @@ -5537,7 +6090,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[36].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateConnectorResponse); i { + switch v := v.(*GetConnectorRequest); i { case 0: return &v.state case 1: @@ -5549,7 +6102,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[37].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteConnectorRequest); i { + switch v := v.(*GetConnectorResponse); i { case 0: return &v.state case 1: @@ -5561,7 +6114,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[38].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteConnectorResponse); i { + switch v := v.(*UpdateConnectorRequest); i { case 0: return &v.state case 1: @@ -5573,7 +6126,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[39].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListProcessorsRequest); i { + switch v := v.(*UpdateConnectorResponse); i { case 0: return &v.state case 1: @@ -5585,7 +6138,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[40].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListProcessorsResponse); i { + switch v := v.(*DeleteConnectorRequest); i { case 0: return &v.state case 1: @@ -5597,7 +6150,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[41].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*InspectProcessorInRequest); i { + switch v := v.(*DeleteConnectorResponse); i { case 0: return &v.state case 1: @@ -5609,7 +6162,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[42].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*InspectProcessorInResponse); i { + 
switch v := v.(*ListConnectorPluginsRequest); i { case 0: return &v.state case 1: @@ -5621,7 +6174,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[43].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*InspectProcessorOutRequest); i { + switch v := v.(*ListConnectorPluginsResponse); i { case 0: return &v.state case 1: @@ -5633,7 +6186,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[44].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*InspectProcessorOutResponse); i { + switch v := v.(*ListProcessorsRequest); i { case 0: return &v.state case 1: @@ -5645,7 +6198,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[45].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateProcessorRequest); i { + switch v := v.(*ListProcessorsResponse); i { case 0: return &v.state case 1: @@ -5657,7 +6210,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[46].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*CreateProcessorResponse); i { + switch v := v.(*InspectProcessorInRequest); i { case 0: return &v.state case 1: @@ -5669,7 +6222,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[47].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetProcessorRequest); i { + switch v := v.(*InspectProcessorInResponse); i { case 0: return &v.state case 1: @@ -5681,7 +6234,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[48].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetProcessorResponse); i { + switch v := v.(*InspectProcessorOutRequest); i { case 0: return &v.state case 1: @@ -5693,7 +6246,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[49].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateProcessorRequest); i { + switch v := v.(*InspectProcessorOutResponse); i 
{ case 0: return &v.state case 1: @@ -5705,7 +6258,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[50].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*UpdateProcessorResponse); i { + switch v := v.(*CreateProcessorRequest); i { case 0: return &v.state case 1: @@ -5717,7 +6270,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[51].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteProcessorRequest); i { + switch v := v.(*CreateProcessorResponse); i { case 0: return &v.state case 1: @@ -5729,7 +6282,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[52].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*DeleteProcessorResponse); i { + switch v := v.(*GetProcessorRequest); i { case 0: return &v.state case 1: @@ -5741,7 +6294,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[53].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetInfoRequest); i { + switch v := v.(*GetProcessorResponse); i { case 0: return &v.state case 1: @@ -5753,7 +6306,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[54].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetInfoResponse); i { + switch v := v.(*UpdateProcessorRequest); i { case 0: return &v.state case 1: @@ -5765,7 +6318,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[55].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Info); i { + switch v := v.(*UpdateProcessorResponse); i { case 0: return &v.state case 1: @@ -5777,7 +6330,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[56].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPluginsRequest); i { + switch v := v.(*DeleteProcessorRequest); i { case 0: return &v.state case 1: @@ -5789,7 +6342,7 @@ func file_api_v1_api_proto_init() { } } 
file_api_v1_api_proto_msgTypes[57].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListPluginsResponse); i { + switch v := v.(*DeleteProcessorResponse); i { case 0: return &v.state case 1: @@ -5801,7 +6354,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[58].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PluginSpecifications); i { + switch v := v.(*ListProcessorPluginsRequest); i { case 0: return &v.state case 1: @@ -5813,7 +6366,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[59].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Pipeline_State); i { + switch v := v.(*ListProcessorPluginsResponse); i { case 0: return &v.state case 1: @@ -5825,7 +6378,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[60].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Pipeline_Config); i { + switch v := v.(*GetInfoRequest); i { case 0: return &v.state case 1: @@ -5837,7 +6390,19 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[61].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Pipeline_DLQ); i { + switch v := v.(*GetInfoResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_v1_api_proto_msgTypes[62].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Info); i { case 0: return &v.state case 1: @@ -5849,7 +6414,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[63].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Connector_SourceState); i { + switch v := v.(*ListPluginsRequest); i { case 0: return &v.state case 1: @@ -5861,7 +6426,7 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[64].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Connector_DestinationState); i { + 
switch v := v.(*ListPluginsResponse); i { case 0: return &v.state case 1: @@ -5873,6 +6438,66 @@ func file_api_v1_api_proto_init() { } } file_api_v1_api_proto_msgTypes[65].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Pipeline_State); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_v1_api_proto_msgTypes[66].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Pipeline_Config); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_v1_api_proto_msgTypes[67].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Pipeline_DLQ); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_v1_api_proto_msgTypes[69].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Connector_SourceState); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_v1_api_proto_msgTypes[70].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Connector_DestinationState); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_api_v1_api_proto_msgTypes[71].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Connector_Config); i { case 0: return &v.state @@ -5884,7 +6509,7 @@ func file_api_v1_api_proto_init() { return nil } } - file_api_v1_api_proto_msgTypes[68].Exporter = func(v interface{}, i int) interface{} { + file_api_v1_api_proto_msgTypes[74].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Processor_Parent); i { case 0: return &v.state @@ -5896,7 +6521,7 @@ func file_api_v1_api_proto_init() { return nil } } - 
file_api_v1_api_proto_msgTypes[69].Exporter = func(v interface{}, i int) interface{} { + file_api_v1_api_proto_msgTypes[75].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Processor_Config); i { case 0: return &v.state @@ -5908,7 +6533,7 @@ func file_api_v1_api_proto_init() { return nil } } - file_api_v1_api_proto_msgTypes[71].Exporter = func(v interface{}, i int) interface{} { + file_api_v1_api_proto_msgTypes[80].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PluginSpecifications_Parameter); i { case 0: return &v.state @@ -5920,7 +6545,7 @@ func file_api_v1_api_proto_init() { return nil } } - file_api_v1_api_proto_msgTypes[74].Exporter = func(v interface{}, i int) interface{} { + file_api_v1_api_proto_msgTypes[83].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*PluginSpecifications_Parameter_Validation); i { case 0: return &v.state @@ -5943,7 +6568,7 @@ func file_api_v1_api_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_api_v1_api_proto_rawDesc, NumEnums: 5, - NumMessages: 75, + NumMessages: 84, NumExtensions: 0, NumServices: 5, }, diff --git a/proto/api/v1/api.pb.gw.go b/proto/api/v1/api.pb.gw.go index 29da3a417..5540c3b60 100644 --- a/proto/api/v1/api.pb.gw.go +++ b/proto/api/v1/api.pb.gw.go @@ -909,6 +909,42 @@ func local_request_ConnectorService_DeleteConnector_0(ctx context.Context, marsh } +var ( + filter_ConnectorService_ListConnectorPlugins_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_ConnectorService_ListConnectorPlugins_0(ctx context.Context, marshaler runtime.Marshaler, client ConnectorServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListConnectorPluginsRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + 
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ConnectorService_ListConnectorPlugins_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListConnectorPlugins(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_ConnectorService_ListConnectorPlugins_0(ctx context.Context, marshaler runtime.Marshaler, server ConnectorServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListConnectorPluginsRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ConnectorService_ListConnectorPlugins_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.ListConnectorPlugins(ctx, &protoReq) + return msg, metadata, err + +} + var ( filter_ProcessorService_ListProcessors_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} ) @@ -1219,6 +1255,42 @@ func local_request_ProcessorService_DeleteProcessor_0(ctx context.Context, marsh } +var ( + filter_ProcessorService_ListProcessorPlugins_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_ProcessorService_ListProcessorPlugins_0(ctx context.Context, marshaler runtime.Marshaler, client ProcessorServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListProcessorPluginsRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, 
filter_ProcessorService_ListProcessorPlugins_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.ListProcessorPlugins(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_ProcessorService_ListProcessorPlugins_0(ctx context.Context, marshaler runtime.Marshaler, server ProcessorServiceServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq ListProcessorPluginsRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_ProcessorService_ListProcessorPlugins_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.ListProcessorPlugins(ctx, &protoReq) + return msg, metadata, err + +} + func request_InformationService_GetInfo_0(ctx context.Context, marshaler runtime.Marshaler, client InformationServiceClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { var protoReq GetInfoRequest var metadata runtime.ServerMetadata @@ -1720,6 +1792,31 @@ func RegisterConnectorServiceHandlerServer(ctx context.Context, mux *runtime.Ser }) + mux.Handle("GET", pattern_ConnectorService_ListConnectorPlugins_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, 
"/api.v1.ConnectorService/ListConnectorPlugins", runtime.WithHTTPPathPattern("/v1/connectors/plugins")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ConnectorService_ListConnectorPlugins_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_ConnectorService_ListConnectorPlugins_0(annotatedContext, mux, outboundMarshaler, w, req, response_ConnectorService_ListConnectorPlugins_0{resp}, mux.GetForwardResponseOptions()...) + + }) + return nil } @@ -1868,6 +1965,31 @@ func RegisterProcessorServiceHandlerServer(ctx context.Context, mux *runtime.Ser }) + mux.Handle("GET", pattern_ProcessorService_ListProcessorPlugins_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/api.v1.ProcessorService/ListProcessorPlugins", runtime.WithHTTPPathPattern("/v1/processors/plugins")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_ProcessorService_ListProcessorPlugins_0(annotatedContext, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + annotatedContext = 
runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_ProcessorService_ListProcessorPlugins_0(annotatedContext, mux, outboundMarshaler, w, req, response_ProcessorService_ListProcessorPlugins_0{resp}, mux.GetForwardResponseOptions()...) + + }) + return nil } @@ -2537,6 +2659,28 @@ func RegisterConnectorServiceHandlerClient(ctx context.Context, mux *runtime.Ser }) + mux.Handle("GET", pattern_ConnectorService_ListConnectorPlugins_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/api.v1.ConnectorService/ListConnectorPlugins", runtime.WithHTTPPathPattern("/v1/connectors/plugins")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ConnectorService_ListConnectorPlugins_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_ConnectorService_ListConnectorPlugins_0(annotatedContext, mux, outboundMarshaler, w, req, response_ConnectorService_ListConnectorPlugins_0{resp}, mux.GetForwardResponseOptions()...) 
+ + }) + return nil } @@ -2585,6 +2729,15 @@ func (m response_ConnectorService_UpdateConnector_0) XXX_ResponseBody() interfac return response.Connector } +type response_ConnectorService_ListConnectorPlugins_0 struct { + proto.Message +} + +func (m response_ConnectorService_ListConnectorPlugins_0) XXX_ResponseBody() interface{} { + response := m.Message.(*ListConnectorPluginsResponse) + return response.Plugins +} + var ( pattern_ConnectorService_ListConnectors_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "connectors"}, "")) @@ -2599,6 +2752,8 @@ var ( pattern_ConnectorService_UpdateConnector_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2}, []string{"v1", "connectors", "id"}, "")) pattern_ConnectorService_DeleteConnector_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2}, []string{"v1", "connectors", "id"}, "")) + + pattern_ConnectorService_ListConnectorPlugins_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "connectors", "plugins"}, "")) ) var ( @@ -2615,6 +2770,8 @@ var ( forward_ConnectorService_UpdateConnector_0 = runtime.ForwardResponseMessage forward_ConnectorService_DeleteConnector_0 = runtime.ForwardResponseMessage + + forward_ConnectorService_ListConnectorPlugins_0 = runtime.ForwardResponseMessage ) // RegisterProcessorServiceHandlerFromEndpoint is same as RegisterProcessorServiceHandler but @@ -2815,6 +2972,28 @@ func RegisterProcessorServiceHandlerClient(ctx context.Context, mux *runtime.Ser }) + mux.Handle("GET", pattern_ProcessorService_ListProcessorPlugins_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + var err error + var annotatedContext context.Context + annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, 
"/api.v1.ProcessorService/ListProcessorPlugins", runtime.WithHTTPPathPattern("/v1/processors/plugins")) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_ProcessorService_ListProcessorPlugins_0(annotatedContext, inboundMarshaler, client, req, pathParams) + annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md) + if err != nil { + runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err) + return + } + + forward_ProcessorService_ListProcessorPlugins_0(annotatedContext, mux, outboundMarshaler, w, req, response_ProcessorService_ListProcessorPlugins_0{resp}, mux.GetForwardResponseOptions()...) + + }) + return nil } @@ -2872,6 +3051,15 @@ func (m response_ProcessorService_UpdateProcessor_0) XXX_ResponseBody() interfac return response.Processor } +type response_ProcessorService_ListProcessorPlugins_0 struct { + proto.Message +} + +func (m response_ProcessorService_ListProcessorPlugins_0) XXX_ResponseBody() interface{} { + response := m.Message.(*ListProcessorPluginsResponse) + return response.Plugins +} + var ( pattern_ProcessorService_ListProcessors_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1}, []string{"v1", "processors"}, "")) @@ -2886,6 +3074,8 @@ var ( pattern_ProcessorService_UpdateProcessor_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2}, []string{"v1", "processors", "id"}, "")) pattern_ProcessorService_DeleteProcessor_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2}, []string{"v1", "processors", "id"}, "")) + + pattern_ProcessorService_ListProcessorPlugins_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2}, []string{"v1", "processors", "plugins"}, "")) ) var ( @@ -2902,6 +3092,8 @@ var ( forward_ProcessorService_UpdateProcessor_0 = runtime.ForwardResponseMessage forward_ProcessorService_DeleteProcessor_0 = runtime.ForwardResponseMessage + + 
forward_ProcessorService_ListProcessorPlugins_0 = runtime.ForwardResponseMessage ) // RegisterInformationServiceHandlerFromEndpoint is same as RegisterInformationServiceHandler but diff --git a/proto/api/v1/api.proto b/proto/api/v1/api.proto index c7031c3a3..b1a9c003a 100644 --- a/proto/api/v1/api.proto +++ b/proto/api/v1/api.proto @@ -3,6 +3,7 @@ syntax = "proto3"; package api.v1; import "opencdc/v1/opencdc.proto"; +import "config/v1/parameter.proto"; import "google/api/field_behavior.proto"; import "google/api/annotations.proto"; import "protoc-gen-openapiv2/options/annotations.proto"; @@ -124,21 +125,138 @@ message Processor { string id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; reserved 2; reserved "state"; // message does not track state + reserved 4; reserved "type"; Config config = 3; - - // -- immutable fields ------------------------------------------------------- - - string type = 4 [(google.api.field_behavior) = IMMUTABLE]; - Parent parent = 6 [(google.api.field_behavior) = IMMUTABLE]; // Condition is a goTemplate formatted string, the value provided to the template is a sdk.Record, it should evaluate // to a boolean value, indicating a condition to run the processor for a specific record or not. (template functions // provided by `sprig` are injected) string condition = 9; + // -- immutable fields ------------------------------------------------------- + + string plugin = 5 [(google.api.field_behavior) = IMMUTABLE]; + Parent parent = 6 [(google.api.field_behavior) = IMMUTABLE]; + google.protobuf.Timestamp created_at = 7; google.protobuf.Timestamp updated_at = 8; } +// ConnectorPluginSpecifications describes the specifications of a connector plugin. +message ConnectorPluginSpecifications{ + // Name is the name of the plugin. + string name = 1; + // Summary is a brief description of the plugin and what it does, + // ideally not longer than one sentence. 
+ string summary = 2; + // Description is a longer form field, appropriate for README-like + // text that the author can provide for documentation about the + // usage of the plugin. + string description = 3; + // Version string. Should follow semantic versioning and use the "v" + // prefix (e.g. v1.23.4). + string version = 4; + // Author declares the entity that created or maintains this plugin. + string author = 5; + // A map that describes parameters available for configuring the + // destination plugin. + map destination_params = 6; + // A map that describes parameters available for configuring the + // source plugin. + map source_params = 7; +} + +// ProcessorPluginSpecifications describes the specifications of a processor plugin. +message ProcessorPluginSpecifications{ + // Name is the name of the plugin. + string name = 1; + // Summary is a brief description of the plugin and what it does, + // ideally not longer than one sentence. + string summary = 2; + // Description is a longer form field, appropriate for README-like + // text that the author can provide for documentation about the + // usage of the plugin. + string description = 3; + // Version string. Should follow semantic versioning and use the "v" + // prefix (e.g. v1.23.4). + string version = 4; + // Author declares the entity that created or maintains this plugin. + string author = 5; + // A map that describes parameters available for configuring the + // processor plugin. + map parameters = 6; +} + +// Deprecated: use ConnectorPluginSpecifications instead. +message PluginSpecifications{ + option deprecated = true; + + // Deprecated: use config.v1.Parameter instead. + message Parameter { + option deprecated = true; + + // Deprecated: use config.v1.Validation instead. + message Validation{ + option deprecated = true; + + // Deprecated: use config.v1.Validation.Type instead. + enum Type { + option deprecated = true; + TYPE_UNSPECIFIED = 0; + // Parameter must be present. 
+ TYPE_REQUIRED = 1; + // Parameter must be greater than {value}. + TYPE_GREATER_THAN = 2; + // Parameter must be less than {value}. + TYPE_LESS_THAN = 3; + // Parameter must be included in the comma separated list {value}. + TYPE_INCLUSION = 4; + // Parameter must not be included in the comma separated list {value}. + TYPE_EXCLUSION = 5; + // Parameter must match the regex {value}. + TYPE_REGEX = 6; + } + + Type type = 1; + // The value to be compared with the parameter, + // or a comma separated list in case of Validation.TYPE_INCLUSION or Validation.TYPE_EXCLUSION. + string value = 2; + } + + // Deprecated: use config.v1.Parameter.Type instead. + enum Type { + option deprecated = true; + TYPE_UNSPECIFIED = 0; + // Parameter is a string. + TYPE_STRING = 1; + // Parameter is an integer. + TYPE_INT = 2; + // Parameter is a float. + TYPE_FLOAT = 3; + // Parameter is a boolean. + TYPE_BOOL = 4; + // Parameter is a file. + TYPE_FILE = 5; + // Parameter is a duration. + TYPE_DURATION = 6; + } + + string description = 1; + string default = 2; + Type type = 3; + repeated Validation validations = 4; + } + + string name = 1; + string summary = 2; + string description = 3; + string version = 4; + string author = 5; + map destination_params = 6; + map source_params = 7; +} + +// -- services ----------------------------------------------------------------- + // PipelineService exposes functionality for managing pipelines. // Endpoints in this service can be used to create, fetch, modify or delete a // pipeline. Entities connected to a pipeline (e.g. 
processors, connectors) can @@ -651,11 +769,23 @@ service ConnectorService { } }; }; + + rpc ListConnectorPlugins(ListConnectorPluginsRequest) returns (ListConnectorPluginsResponse) { + option (google.api.http) = { + get: "/v1/connectors/plugins" + response_body: "plugins" + }; + }; } message CreateConnectorRequest { Connector.Type type = 1; - // Plugin name is the name of the builtin plugin, or the absolute path of a standalone plugin. + // Used to reference a plugin. Its format is as follows: + // [PLUGIN-TYPE:]PLUGIN-NAME[@VERSION] + // PLUGIN-TYPE: One of: builtin, standalone or any (default). + // PLUGIN-NAME: The name of the plugin as specified in the plugin specifications. + // VERSION: The plugin version as specified in the plugin specifications or latest (default). + // For more information, see: https://conduit.io/docs/connectors/referencing/ string plugin = 2; // ID of the pipeline to which the connector will get attached. string pipeline_id = 3; @@ -712,6 +842,15 @@ message DeleteConnectorRequest { } message DeleteConnectorResponse {} +message ListConnectorPluginsRequest { + // Regex to filter plugins by name. + string name = 1; +} + +message ListConnectorPluginsResponse{ + repeated ConnectorPluginSpecifications plugins = 1; +} + // ProcessorService exposes CRUD functionality for managing processors. 
service ProcessorService { rpc ListProcessors(ListProcessorsRequest) returns (ListProcessorsResponse) { @@ -857,6 +996,13 @@ service ProcessorService { } }; }; + + rpc ListProcessorPlugins(ListProcessorPluginsRequest) returns (ListProcessorPluginsResponse) { + option (google.api.http) = { + get: "/v1/processors/plugins" + response_body: "plugins" + }; + }; } message ListProcessorsRequest { @@ -882,10 +1028,11 @@ message InspectProcessorOutResponse { message CreateProcessorRequest { - string type = 1; + string type = 1 [deprecated = true]; Processor.Parent parent = 3; Processor.Config config = 4; string condition = 5; + string plugin = 6; } message CreateProcessorResponse { Processor processor = 1; @@ -911,6 +1058,15 @@ message DeleteProcessorRequest { } message DeleteProcessorResponse {} +message ListProcessorPluginsRequest { + // Regex to filter plugins by name. + string name = 1; +} + +message ListProcessorPluginsResponse{ + repeated ProcessorPluginSpecifications plugins = 1; +} + service InformationService { rpc GetInfo (GetInfoRequest) returns (GetInfoResponse) { option (google.api.http) = { @@ -963,8 +1119,11 @@ option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = { } }; +// Deprecated: use ConnectorService and ProcessorService instead. service PluginService { + // Deprecated: use ConnectorService.ListConnectorPlugins instead. rpc ListPlugins(ListPluginsRequest) returns (ListPluginsResponse) { + option deprecated = true; option (google.api.http) = { get: "/v1/plugins" response_body: "plugins" @@ -972,69 +1131,15 @@ service PluginService { }; } +// Deprecated: use ConnectorService.ListConnectorPlugins instead. message ListPluginsRequest { + option deprecated = true; // Regex to filter plugins by name. string name = 1; } +// Deprecated: use ConnectorService.ListConnectorPlugins instead. 
message ListPluginsResponse{ + option deprecated = true; repeated PluginSpecifications plugins = 1; } - -message PluginSpecifications{ - message Parameter { - // Validation to be made on the parameter. - message Validation{ - enum Type { - TYPE_UNSPECIFIED = 0; - // Parameter must be present. - TYPE_REQUIRED = 1; - // Parameter must be greater than {value}. - TYPE_GREATER_THAN = 2; - // Parameter must be less than {value}. - TYPE_LESS_THAN = 3; - // Parameter must be included in the comma separated list {value}. - TYPE_INCLUSION = 4; - // Parameter must not be included in the comma separated list {value}. - TYPE_EXCLUSION = 5; - // Parameter must match the regex {value}. - TYPE_REGEX = 6; - } - - Type type = 1; - // The value to be compared with the parameter, - // or a comma separated list in case of Validation.TYPE_INCLUSION or Validation.TYPE_EXCLUSION. - string value = 2; - } - - // Type shows the parameter type. - enum Type { - TYPE_UNSPECIFIED = 0; - // Parameter is a string. - TYPE_STRING = 1; - // Parameter is an integer. - TYPE_INT = 2; - // Parameter is a float. - TYPE_FLOAT = 3; - // Parameter is a boolean. - TYPE_BOOL = 4; - // Parameter is a file. - TYPE_FILE = 5; - // Parameter is a duration. 
- TYPE_DURATION = 6; - } - - string description = 1; - string default = 2; - Type type = 3; - repeated Validation validations = 4; - } - - string name = 1; - string summary = 2; - string description = 3; - string version = 4; - string author = 5; - map destination_params = 6; - map source_params = 7; -} diff --git a/proto/api/v1/api.swagger.json b/proto/api/v1/api.swagger.json index 1e7f86e8b..d5a3c4ee7 100644 --- a/proto/api/v1/api.swagger.json +++ b/proto/api/v1/api.swagger.json @@ -156,6 +156,48 @@ ] } }, + "/v1/connectors/plugins": { + "get": { + "operationId": "ConnectorService_ListConnectorPlugins", + "responses": { + "200": { + "description": "", + "schema": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v1ConnectorPluginSpecifications" + } + } + }, + "500": { + "description": "", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + }, + "examples": { + "application/json": { + "code": 13, + "message": "server error", + "details": [] + } + } + } + }, + "parameters": [ + { + "name": "name", + "description": "Regex to filter plugins by name.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "ConnectorService" + ] + } + }, "/v1/connectors/validate": { "post": { "operationId": "ConnectorService_ValidateConnector", @@ -1036,6 +1078,7 @@ }, "/v1/plugins": { "get": { + "summary": "Deprecated: use ConnectorService.ListConnectorPlugins instead.", "operationId": "PluginService_ListPlugins", "responses": { "200": { @@ -1171,6 +1214,48 @@ ] } }, + "/v1/processors/plugins": { + "get": { + "operationId": "ProcessorService_ListProcessorPlugins", + "responses": { + "200": { + "description": "", + "schema": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v1ProcessorPluginSpecifications" + } + } + }, + "500": { + "description": "", + "schema": { + "$ref": "#/definitions/googlerpcStatus" + }, + "examples": { + "application/json": { + "code": 13, + "message": "server error", + 
"details": [] + } + } + } + }, + "parameters": [ + { + "name": "name", + "description": "Regex to filter plugins by name.", + "in": "query", + "required": false, + "type": "string" + } + ], + "tags": [ + "ProcessorService" + ] + } + }, "/v1/processors/{id}": { "get": { "operationId": "ProcessorService_GetProcessor", @@ -1467,33 +1552,6 @@ } } }, - "ParameterValidation": { - "type": "object", - "properties": { - "type": { - "$ref": "#/definitions/ParameterValidationType" - }, - "value": { - "type": "string", - "description": "The value to be compared with the parameter,\nor a comma separated list in case of Validation.TYPE_INCLUSION or Validation.TYPE_EXCLUSION." - } - }, - "description": "Validation to be made on the parameter." - }, - "ParameterValidationType": { - "type": "string", - "enum": [ - "TYPE_UNSPECIFIED", - "TYPE_REQUIRED", - "TYPE_GREATER_THAN", - "TYPE_LESS_THAN", - "TYPE_INCLUSION", - "TYPE_EXCLUSION", - "TYPE_REGEX" - ], - "default": "TYPE_UNSPECIFIED", - "description": " - TYPE_REQUIRED: Parameter must be present.\n - TYPE_GREATER_THAN: Parameter must be greater than {value}.\n - TYPE_LESS_THAN: Parameter must be less than {value}.\n - TYPE_INCLUSION: Parameter must be included in the comma separated list {value}.\n - TYPE_EXCLUSION: Parameter must not be included in the comma separated list {value}.\n - TYPE_REGEX: Parameter must match the regex {value}." 
- }, "PipelineDLQ": { "type": "object", "properties": { @@ -1532,40 +1590,32 @@ } } }, - "PluginSpecificationsParameter": { + "PluginSpecificationsParameterValidation": { "type": "object", "properties": { - "description": { - "type": "string" - }, - "default": { - "type": "string" - }, "type": { - "$ref": "#/definitions/PluginSpecificationsParameterType" + "$ref": "#/definitions/PluginSpecificationsParameterValidationType" }, - "validations": { - "type": "array", - "items": { - "type": "object", - "$ref": "#/definitions/ParameterValidation" - } + "value": { + "type": "string", + "description": "The value to be compared with the parameter,\nor a comma separated list in case of Validation.TYPE_INCLUSION or Validation.TYPE_EXCLUSION." } - } + }, + "description": "Deprecated: use config.v1.Validation instead." }, - "PluginSpecificationsParameterType": { + "PluginSpecificationsParameterValidationType": { "type": "string", "enum": [ "TYPE_UNSPECIFIED", - "TYPE_STRING", - "TYPE_INT", - "TYPE_FLOAT", - "TYPE_BOOL", - "TYPE_FILE", - "TYPE_DURATION" + "TYPE_REQUIRED", + "TYPE_GREATER_THAN", + "TYPE_LESS_THAN", + "TYPE_INCLUSION", + "TYPE_EXCLUSION", + "TYPE_REGEX" ], "default": "TYPE_UNSPECIFIED", - "description": "Type shows the parameter type.\n\n - TYPE_STRING: Parameter is a string.\n - TYPE_INT: Parameter is an integer.\n - TYPE_FLOAT: Parameter is a float.\n - TYPE_BOOL: Parameter is a boolean.\n - TYPE_FILE: Parameter is a file.\n - TYPE_DURATION: Parameter is a duration." + "description": "Deprecated: use config.v1.Validation.Type instead.\n\n - TYPE_REQUIRED: Parameter must be present.\n - TYPE_GREATER_THAN: Parameter must be greater than {value}.\n - TYPE_LESS_THAN: Parameter must be less than {value}.\n - TYPE_INCLUSION: Parameter must be included in the comma separated list {value}.\n - TYPE_EXCLUSION: Parameter must not be included in the comma separated list {value}.\n - TYPE_REGEX: Parameter must match the regex {value}." 
}, "ProcessorParent": { "type": "object", @@ -1602,6 +1652,73 @@ } } }, + "configv1Parameter": { + "type": "object", + "properties": { + "default": { + "type": "string", + "description": "Default is the default value of the parameter. If there is no default\nvalue use an empty string." + }, + "description": { + "type": "string", + "description": "Description explains what the parameter does and how to configure it." + }, + "type": { + "$ref": "#/definitions/configv1ParameterType", + "description": "Type defines the parameter data type." + }, + "validations": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/configv1Validation" + }, + "description": "Validations are validations to be made on the parameter." + } + }, + "description": "Parameter describes a single config parameter." + }, + "configv1ParameterType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "TYPE_STRING", + "TYPE_INT", + "TYPE_FLOAT", + "TYPE_BOOL", + "TYPE_FILE", + "TYPE_DURATION" + ], + "default": "TYPE_UNSPECIFIED", + "description": "Type shows the parameter type.\n\n - TYPE_STRING: Parameter is a string.\n - TYPE_INT: Parameter is an integer.\n - TYPE_FLOAT: Parameter is a float.\n - TYPE_BOOL: Parameter is a boolean.\n - TYPE_FILE: Parameter is a file.\n - TYPE_DURATION: Parameter is a duration." + }, + "configv1Validation": { + "type": "object", + "properties": { + "type": { + "$ref": "#/definitions/configv1ValidationType" + }, + "value": { + "type": "string", + "description": "The value to be compared with the parameter,\nor a comma separated list in case of Validation.TYPE_INCLUSION or Validation.TYPE_EXCLUSION." + } + }, + "description": "Validation to be made on the parameter." 
+ }, + "configv1ValidationType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "TYPE_REQUIRED", + "TYPE_GREATER_THAN", + "TYPE_LESS_THAN", + "TYPE_INCLUSION", + "TYPE_EXCLUSION", + "TYPE_REGEX" + ], + "default": "TYPE_UNSPECIFIED", + "description": " - TYPE_REQUIRED: Parameter must be present.\n - TYPE_GREATER_THAN: Parameter must be greater than {value}.\n - TYPE_LESS_THAN: Parameter must be less than {value}.\n - TYPE_INCLUSION: Parameter must be included in the comma separated list {value}.\n - TYPE_EXCLUSION: Parameter must not be included in the comma separated list {value}.\n - TYPE_REGEX: Parameter must match the regex {value}." + }, "googlerpcStatus": { "type": "object", "properties": { @@ -1720,6 +1837,46 @@ } } }, + "v1ConnectorPluginSpecifications": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name is the name of the plugin." + }, + "summary": { + "type": "string", + "description": "Summary is a brief description of the plugin and what it does,\nideally not longer than one sentence." + }, + "description": { + "type": "string", + "description": "Description is a longer form field, appropriate for README-like\ntext that the author can provide for documentation about the\nusage of the plugin." + }, + "version": { + "type": "string", + "description": "Version string. Should follow semantic versioning and use the \"v\"\nprefix (e.g. v1.23.4)." + }, + "author": { + "type": "string", + "description": "Author declares the entity that created or maintains this plugin." + }, + "destinationParams": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/configv1Parameter" + }, + "description": "A map that describes parameters available for configuring the\ndestination plugin." 
+ }, + "sourceParams": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/configv1Parameter" + }, + "description": "A map that describes parameters available for configuring the\nsource plugin." + } + }, + "description": "ConnectorPluginSpecifications describes the specifications of a connector plugin." + }, "v1ConnectorType": { "type": "string", "enum": [ @@ -1738,7 +1895,7 @@ }, "plugin": { "type": "string", - "description": "Plugin name is the name of the builtin plugin, or the absolute path of a standalone plugin." + "title": "Used to reference a plugin. Its format is as follows:\n[PLUGIN-TYPE:]PLUGIN-NAME[@VERSION]\nPLUGIN-TYPE: One of: builtin, standalone or any (default).\nPLUGIN-NAME: The name of the plugin as specified in the plugin specifications.\nVERSION: The plugin version as specified in the plugin specifications or latest (default).\nFor more information, see: https://conduit.io/docs/connectors/referencing/" }, "pipelineId": { "type": "string", @@ -1787,6 +1944,9 @@ }, "condition": { "type": "string" + }, + "plugin": { + "type": "string" } } }, @@ -1902,6 +2062,18 @@ } } }, + "v1ListConnectorPluginsResponse": { + "type": "object", + "properties": { + "plugins": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v1ConnectorPluginSpecifications" + } + } + } + }, "v1ListConnectorsResponse": { "type": "object", "properties": { @@ -1936,6 +2108,19 @@ "$ref": "#/definitions/v1PluginSpecifications" } } + }, + "description": "Deprecated: use ConnectorService.ListConnectorPlugins instead." 
+ }, + "v1ListProcessorPluginsResponse": { + "type": "object", + "properties": { + "plugins": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/v1ProcessorPluginSpecifications" + } + } } }, "v1ListProcessorsResponse": { @@ -2030,16 +2215,53 @@ "destinationParams": { "type": "object", "additionalProperties": { - "$ref": "#/definitions/PluginSpecificationsParameter" + "$ref": "#/definitions/v1PluginSpecificationsParameter" } }, "sourceParams": { "type": "object", "additionalProperties": { - "$ref": "#/definitions/PluginSpecificationsParameter" + "$ref": "#/definitions/v1PluginSpecificationsParameter" } } - } + }, + "description": "Deprecated: use ConnectorPluginSpecifications instead." + }, + "v1PluginSpecificationsParameter": { + "type": "object", + "properties": { + "description": { + "type": "string" + }, + "default": { + "type": "string" + }, + "type": { + "$ref": "#/definitions/v1PluginSpecificationsParameterType" + }, + "validations": { + "type": "array", + "items": { + "type": "object", + "$ref": "#/definitions/PluginSpecificationsParameterValidation" + } + } + }, + "description": "Deprecated: use config.v1.Parameter instead." + }, + "v1PluginSpecificationsParameterType": { + "type": "string", + "enum": [ + "TYPE_UNSPECIFIED", + "TYPE_STRING", + "TYPE_INT", + "TYPE_FLOAT", + "TYPE_BOOL", + "TYPE_FILE", + "TYPE_DURATION" + ], + "default": "TYPE_UNSPECIFIED", + "description": "Deprecated: use config.v1.Parameter.Type instead.\n\n - TYPE_STRING: Parameter is a string.\n - TYPE_INT: Parameter is an integer.\n - TYPE_FLOAT: Parameter is a float.\n - TYPE_BOOL: Parameter is a boolean.\n - TYPE_FILE: Parameter is a file.\n - TYPE_DURATION: Parameter is a duration." 
}, "v1Processor": { "type": "object", @@ -2051,16 +2273,16 @@ "config": { "$ref": "#/definitions/v1ProcessorConfig" }, - "type": { + "condition": { + "type": "string", + "title": "Condition is a goTemplate formatted string, the value provided to the template is a sdk.Record, it should evaluate\nto a boolean value, indicating a condition to run the processor for a specific record or not. (template functions\nprovided by `sprig` are injected)" + }, + "plugin": { "type": "string" }, "parent": { "$ref": "#/definitions/ProcessorParent" }, - "condition": { - "type": "string", - "title": "Condition is a goTemplate formatted string, the value provided to the template is a sdk.Record, it should evaluate\nto a boolean value, indicating a condition to run the processor for a specific record or not. (template functions\nprovided by `sprig` are injected)" - }, "createdAt": { "type": "string", "format": "date-time" @@ -2086,6 +2308,39 @@ } } }, + "v1ProcessorPluginSpecifications": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name is the name of the plugin." + }, + "summary": { + "type": "string", + "description": "Summary is a brief description of the plugin and what it does,\nideally not longer than one sentence." + }, + "description": { + "type": "string", + "description": "Description is a longer form field, appropriate for README-like\ntext that the author can provide for documentation about the\nusage of the plugin." + }, + "version": { + "type": "string", + "description": "Version string. Should follow semantic versioning and use the \"v\"\nprefix (e.g. v1.23.4)." + }, + "author": { + "type": "string", + "description": "Author declares the entity that created or maintains this plugin." + }, + "parameters": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/configv1Parameter" + }, + "description": "A map that describes parameters available for configuring the\nprocessor plugin." 
+ } + }, + "description": "ProcessorPluginSpecifications describes the specifications of a processor plugin." + }, "v1Record": { "type": "object", "properties": { diff --git a/proto/api/v1/api_grpc.pb.go b/proto/api/v1/api_grpc.pb.go index 1af34a1db..68d6ae054 100644 --- a/proto/api/v1/api_grpc.pb.go +++ b/proto/api/v1/api_grpc.pb.go @@ -479,13 +479,14 @@ var PipelineService_ServiceDesc = grpc.ServiceDesc{ } const ( - ConnectorService_ListConnectors_FullMethodName = "/api.v1.ConnectorService/ListConnectors" - ConnectorService_InspectConnector_FullMethodName = "/api.v1.ConnectorService/InspectConnector" - ConnectorService_GetConnector_FullMethodName = "/api.v1.ConnectorService/GetConnector" - ConnectorService_CreateConnector_FullMethodName = "/api.v1.ConnectorService/CreateConnector" - ConnectorService_ValidateConnector_FullMethodName = "/api.v1.ConnectorService/ValidateConnector" - ConnectorService_UpdateConnector_FullMethodName = "/api.v1.ConnectorService/UpdateConnector" - ConnectorService_DeleteConnector_FullMethodName = "/api.v1.ConnectorService/DeleteConnector" + ConnectorService_ListConnectors_FullMethodName = "/api.v1.ConnectorService/ListConnectors" + ConnectorService_InspectConnector_FullMethodName = "/api.v1.ConnectorService/InspectConnector" + ConnectorService_GetConnector_FullMethodName = "/api.v1.ConnectorService/GetConnector" + ConnectorService_CreateConnector_FullMethodName = "/api.v1.ConnectorService/CreateConnector" + ConnectorService_ValidateConnector_FullMethodName = "/api.v1.ConnectorService/ValidateConnector" + ConnectorService_UpdateConnector_FullMethodName = "/api.v1.ConnectorService/UpdateConnector" + ConnectorService_DeleteConnector_FullMethodName = "/api.v1.ConnectorService/DeleteConnector" + ConnectorService_ListConnectorPlugins_FullMethodName = "/api.v1.ConnectorService/ListConnectorPlugins" ) // ConnectorServiceClient is the client API for ConnectorService service. 
@@ -499,6 +500,7 @@ type ConnectorServiceClient interface { ValidateConnector(ctx context.Context, in *ValidateConnectorRequest, opts ...grpc.CallOption) (*ValidateConnectorResponse, error) UpdateConnector(ctx context.Context, in *UpdateConnectorRequest, opts ...grpc.CallOption) (*UpdateConnectorResponse, error) DeleteConnector(ctx context.Context, in *DeleteConnectorRequest, opts ...grpc.CallOption) (*DeleteConnectorResponse, error) + ListConnectorPlugins(ctx context.Context, in *ListConnectorPluginsRequest, opts ...grpc.CallOption) (*ListConnectorPluginsResponse, error) } type connectorServiceClient struct { @@ -595,6 +597,15 @@ func (c *connectorServiceClient) DeleteConnector(ctx context.Context, in *Delete return out, nil } +func (c *connectorServiceClient) ListConnectorPlugins(ctx context.Context, in *ListConnectorPluginsRequest, opts ...grpc.CallOption) (*ListConnectorPluginsResponse, error) { + out := new(ListConnectorPluginsResponse) + err := c.cc.Invoke(ctx, ConnectorService_ListConnectorPlugins_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // ConnectorServiceServer is the server API for ConnectorService service. 
// All implementations must embed UnimplementedConnectorServiceServer // for forward compatibility @@ -606,6 +617,7 @@ type ConnectorServiceServer interface { ValidateConnector(context.Context, *ValidateConnectorRequest) (*ValidateConnectorResponse, error) UpdateConnector(context.Context, *UpdateConnectorRequest) (*UpdateConnectorResponse, error) DeleteConnector(context.Context, *DeleteConnectorRequest) (*DeleteConnectorResponse, error) + ListConnectorPlugins(context.Context, *ListConnectorPluginsRequest) (*ListConnectorPluginsResponse, error) mustEmbedUnimplementedConnectorServiceServer() } @@ -634,6 +646,9 @@ func (UnimplementedConnectorServiceServer) UpdateConnector(context.Context, *Upd func (UnimplementedConnectorServiceServer) DeleteConnector(context.Context, *DeleteConnectorRequest) (*DeleteConnectorResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method DeleteConnector not implemented") } +func (UnimplementedConnectorServiceServer) ListConnectorPlugins(context.Context, *ListConnectorPluginsRequest) (*ListConnectorPluginsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListConnectorPlugins not implemented") +} func (UnimplementedConnectorServiceServer) mustEmbedUnimplementedConnectorServiceServer() {} // UnsafeConnectorServiceServer may be embedded to opt out of forward compatibility for this service. 
@@ -776,6 +791,24 @@ func _ConnectorService_DeleteConnector_Handler(srv interface{}, ctx context.Cont return interceptor(ctx, in, info, handler) } +func _ConnectorService_ListConnectorPlugins_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListConnectorPluginsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ConnectorServiceServer).ListConnectorPlugins(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ConnectorService_ListConnectorPlugins_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ConnectorServiceServer).ListConnectorPlugins(ctx, req.(*ListConnectorPluginsRequest)) + } + return interceptor(ctx, in, info, handler) +} + // ConnectorService_ServiceDesc is the grpc.ServiceDesc for ConnectorService service. // It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -807,6 +840,10 @@ var ConnectorService_ServiceDesc = grpc.ServiceDesc{ MethodName: "DeleteConnector", Handler: _ConnectorService_DeleteConnector_Handler, }, + { + MethodName: "ListConnectorPlugins", + Handler: _ConnectorService_ListConnectorPlugins_Handler, + }, }, Streams: []grpc.StreamDesc{ { @@ -819,13 +856,14 @@ var ConnectorService_ServiceDesc = grpc.ServiceDesc{ } const ( - ProcessorService_ListProcessors_FullMethodName = "/api.v1.ProcessorService/ListProcessors" - ProcessorService_InspectProcessorIn_FullMethodName = "/api.v1.ProcessorService/InspectProcessorIn" - ProcessorService_InspectProcessorOut_FullMethodName = "/api.v1.ProcessorService/InspectProcessorOut" - ProcessorService_GetProcessor_FullMethodName = "/api.v1.ProcessorService/GetProcessor" - ProcessorService_CreateProcessor_FullMethodName = "/api.v1.ProcessorService/CreateProcessor" - 
ProcessorService_UpdateProcessor_FullMethodName = "/api.v1.ProcessorService/UpdateProcessor" - ProcessorService_DeleteProcessor_FullMethodName = "/api.v1.ProcessorService/DeleteProcessor" + ProcessorService_ListProcessors_FullMethodName = "/api.v1.ProcessorService/ListProcessors" + ProcessorService_InspectProcessorIn_FullMethodName = "/api.v1.ProcessorService/InspectProcessorIn" + ProcessorService_InspectProcessorOut_FullMethodName = "/api.v1.ProcessorService/InspectProcessorOut" + ProcessorService_GetProcessor_FullMethodName = "/api.v1.ProcessorService/GetProcessor" + ProcessorService_CreateProcessor_FullMethodName = "/api.v1.ProcessorService/CreateProcessor" + ProcessorService_UpdateProcessor_FullMethodName = "/api.v1.ProcessorService/UpdateProcessor" + ProcessorService_DeleteProcessor_FullMethodName = "/api.v1.ProcessorService/DeleteProcessor" + ProcessorService_ListProcessorPlugins_FullMethodName = "/api.v1.ProcessorService/ListProcessorPlugins" ) // ProcessorServiceClient is the client API for ProcessorService service. 
@@ -841,6 +879,7 @@ type ProcessorServiceClient interface { CreateProcessor(ctx context.Context, in *CreateProcessorRequest, opts ...grpc.CallOption) (*CreateProcessorResponse, error) UpdateProcessor(ctx context.Context, in *UpdateProcessorRequest, opts ...grpc.CallOption) (*UpdateProcessorResponse, error) DeleteProcessor(ctx context.Context, in *DeleteProcessorRequest, opts ...grpc.CallOption) (*DeleteProcessorResponse, error) + ListProcessorPlugins(ctx context.Context, in *ListProcessorPluginsRequest, opts ...grpc.CallOption) (*ListProcessorPluginsResponse, error) } type processorServiceClient struct { @@ -960,6 +999,15 @@ func (c *processorServiceClient) DeleteProcessor(ctx context.Context, in *Delete return out, nil } +func (c *processorServiceClient) ListProcessorPlugins(ctx context.Context, in *ListProcessorPluginsRequest, opts ...grpc.CallOption) (*ListProcessorPluginsResponse, error) { + out := new(ListProcessorPluginsResponse) + err := c.cc.Invoke(ctx, ProcessorService_ListProcessorPlugins_FullMethodName, in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // ProcessorServiceServer is the server API for ProcessorService service. 
// All implementations must embed UnimplementedProcessorServiceServer // for forward compatibility @@ -973,6 +1021,7 @@ type ProcessorServiceServer interface { CreateProcessor(context.Context, *CreateProcessorRequest) (*CreateProcessorResponse, error) UpdateProcessor(context.Context, *UpdateProcessorRequest) (*UpdateProcessorResponse, error) DeleteProcessor(context.Context, *DeleteProcessorRequest) (*DeleteProcessorResponse, error) + ListProcessorPlugins(context.Context, *ListProcessorPluginsRequest) (*ListProcessorPluginsResponse, error) mustEmbedUnimplementedProcessorServiceServer() } @@ -1001,6 +1050,9 @@ func (UnimplementedProcessorServiceServer) UpdateProcessor(context.Context, *Upd func (UnimplementedProcessorServiceServer) DeleteProcessor(context.Context, *DeleteProcessorRequest) (*DeleteProcessorResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method DeleteProcessor not implemented") } +func (UnimplementedProcessorServiceServer) ListProcessorPlugins(context.Context, *ListProcessorPluginsRequest) (*ListProcessorPluginsResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method ListProcessorPlugins not implemented") +} func (UnimplementedProcessorServiceServer) mustEmbedUnimplementedProcessorServiceServer() {} // UnsafeProcessorServiceServer may be embedded to opt out of forward compatibility for this service. 
@@ -1146,6 +1198,24 @@ func _ProcessorService_DeleteProcessor_Handler(srv interface{}, ctx context.Cont return interceptor(ctx, in, info, handler) } +func _ProcessorService_ListProcessorPlugins_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ListProcessorPluginsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(ProcessorServiceServer).ListProcessorPlugins(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: ProcessorService_ListProcessorPlugins_FullMethodName, + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(ProcessorServiceServer).ListProcessorPlugins(ctx, req.(*ListProcessorPluginsRequest)) + } + return interceptor(ctx, in, info, handler) +} + // ProcessorService_ServiceDesc is the grpc.ServiceDesc for ProcessorService service. // It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -1173,6 +1243,10 @@ var ProcessorService_ServiceDesc = grpc.ServiceDesc{ MethodName: "DeleteProcessor", Handler: _ProcessorService_DeleteProcessor_Handler, }, + { + MethodName: "ListProcessorPlugins", + Handler: _ProcessorService_ListProcessorPlugins_Handler, + }, }, Streams: []grpc.StreamDesc{ { @@ -1287,6 +1361,8 @@ const ( // // For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. type PluginServiceClient interface { + // Deprecated: Do not use. + // Deprecated: use ConnectorService.ListConnectorPlugins instead. ListPlugins(ctx context.Context, in *ListPluginsRequest, opts ...grpc.CallOption) (*ListPluginsResponse, error) } @@ -1298,6 +1374,7 @@ func NewPluginServiceClient(cc grpc.ClientConnInterface) PluginServiceClient { return &pluginServiceClient{cc} } +// Deprecated: Do not use. 
func (c *pluginServiceClient) ListPlugins(ctx context.Context, in *ListPluginsRequest, opts ...grpc.CallOption) (*ListPluginsResponse, error) { out := new(ListPluginsResponse) err := c.cc.Invoke(ctx, PluginService_ListPlugins_FullMethodName, in, out, opts...) @@ -1311,6 +1388,8 @@ func (c *pluginServiceClient) ListPlugins(ctx context.Context, in *ListPluginsRe // All implementations must embed UnimplementedPluginServiceServer // for forward compatibility type PluginServiceServer interface { + // Deprecated: Do not use. + // Deprecated: use ConnectorService.ListConnectorPlugins instead. ListPlugins(context.Context, *ListPluginsRequest) (*ListPluginsResponse, error) mustEmbedUnimplementedPluginServiceServer() } diff --git a/proto/buf.lock b/proto/buf.lock index e4634e370..a8fa7545b 100644 --- a/proto/buf.lock +++ b/proto/buf.lock @@ -4,13 +4,13 @@ deps: - remote: buf.build owner: conduitio repository: conduit-commons - commit: de2e1a18c9e042119dedd69b852efc21 - digest: shake256:1275ac9a7d437cc6b48bd6ff2e9edc59f0450a8f9ab6050f49fb9949e231b7f4d91debfd42b09fc29d9e4aed1b9904266a46a46f7a5676218b53aafb7efe211d + commit: 5b10e1d6574640b2864772621d09bba7 + digest: shake256:54f1581e61a4f540fe141893c80a924e0091cfe57b19bde9078ca10dd152828acdebcc8e6b9abf2fb6c8dbede06d0903fc85b426009ad9e9fb149813bfe75d63 - remote: buf.build owner: googleapis repository: googleapis - commit: a86849a25cc04f4dbe9b15ddddfbc488 - digest: shake256:e19143328f8cbfe13fc226aeee5e63773ca494693a72740a7560664270039a380d94a1344234b88c7691311460df9a9b1c2982190d0a2612eae80368718e1943 + commit: 7e6f6e774e29406da95bd61cdcdbc8bc + digest: shake256:fe43dd2265ea0c07d76bd925eeba612667cf4c948d2ce53d6e367e1b4b3cb5fa69a51e6acb1a6a50d32f894f054a35e6c0406f6808a483f2752e10c866ffbf73 - remote: buf.build owner: grpc-ecosystem repository: grpc-gateway diff --git a/tools.go b/tools.go index 1be76d003..96a5e867a 100644 --- a/tools.go +++ b/tools.go @@ -18,6 +18,7 @@ package main import ( _ "github.com/bufbuild/buf/cmd/buf" + _ 
"github.com/conduitio/conduit-commons/paramgen" _ "github.com/golangci/golangci-lint/cmd/golangci-lint" _ "go.uber.org/mock/mockgen" _ "golang.org/x/tools/cmd/stringer"