diff --git a/airbyte-api/build.gradle.kts b/airbyte-api/build.gradle.kts index 7ea62f83e4a..f1b015d6c92 100644 --- a/airbyte-api/build.gradle.kts +++ b/airbyte-api/build.gradle.kts @@ -6,26 +6,24 @@ plugins { id("io.airbyte.gradle.jvm.lib") } -airbyte { - spotless { - excludes = listOf( - project(":oss:airbyte-api:workload-api").file("src/main/openapi/workload-openapi.yaml").path, - project(":oss:airbyte-api:server-api").file("src/main/openapi/api.yaml").path, - - project(":oss:airbyte-api:server-api").file("src/main/openapi/api_sdk.yaml").path, - project(":oss:airbyte-api:server-api").file("src/main/openapi/api_terraform.yaml").path, - project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_connections.yaml").path, - project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_sources.yaml").path, - project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_destinations.yaml").path, - project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_streams.yaml").path, - project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_jobs.yaml").path, - project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_workspaces.yaml").path, - ) - } +dependencies { + project.subprojects.forEach { implementation(project(it.path)) } } -dependencies { - project.subprojects.forEach { subProject -> - implementation(project(subProject.path)) +airbyte { + spotless { + excludes = + listOf( + project(":oss:airbyte-api:workload-api").file("src/main/openapi/workload-openapi.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_sdk.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_terraform.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_connections.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_sources.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_destinations.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_streams.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_jobs.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_workspaces.yaml").path, + ) } } diff --git a/airbyte-api/commons/src/main/kotlin/io/airbyte/api/client/auth/KeycloakAccessTokenInterceptor.kt b/airbyte-api/commons/src/main/kotlin/io/airbyte/api/client/auth/KeycloakAccessTokenInterceptor.kt index 03c36a257be..b425c31cd2c 100644 --- a/airbyte-api/commons/src/main/kotlin/io/airbyte/api/client/auth/KeycloakAccessTokenInterceptor.kt +++ b/airbyte-api/commons/src/main/kotlin/io/airbyte/api/client/auth/KeycloakAccessTokenInterceptor.kt @@ -3,6 +3,7 @@ package io.airbyte.api.client.auth import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.context.annotation.Requires import io.micronaut.http.HttpHeaders +import io.micronaut.http.exceptions.HttpException import io.micronaut.security.oauth2.client.clientcredentials.ClientCredentialsClient import jakarta.inject.Named import jakarta.inject.Singleton @@ -10,6 +11,8 @@ import okhttp3.Interceptor import okhttp3.Request import okhttp3.Response import reactor.core.publisher.Mono +import reactor.util.retry.Retry +import java.time.Duration private val logger = 
KotlinLogging.logger {} @@ -26,25 +29,36 @@ private val logger = KotlinLogging.logger {} class KeycloakAccessTokenInterceptor( @Named("keycloak") private val clientCredentialsClient: ClientCredentialsClient, ) : AirbyteApiInterceptor { - override fun intercept(chain: Interceptor.Chain): Response = + private fun fetchAccessToken(): Mono { + return Mono.defer { Mono.from(clientCredentialsClient.requestToken()) } + .map { it.accessToken } + .retryWhen( + Retry + .backoff(3, Duration.ofSeconds(1)) + .filter { it is HttpException }, + ) + } + + override fun intercept(chain: Interceptor.Chain): Response { + val originalRequest: Request = chain.request() + val builder: Request.Builder = originalRequest.newBuilder() + try { logger.debug { "Intercepting request to add Keycloak access token..." } - val originalRequest: Request = chain.request() - val builder: Request.Builder = originalRequest.newBuilder() - val tokenResponse = Mono.from(clientCredentialsClient.requestToken()).block() - val accessToken = tokenResponse?.accessToken + val accessToken = fetchAccessToken().block() if (accessToken != null) { builder.addHeader(HttpHeaders.AUTHORIZATION, "Bearer $accessToken") logger.debug { "Added access token to header $accessToken" } - chain.proceed(builder.build()) } else { logger.error { "Failed to obtain access token from Keycloak" } - chain.proceed(originalRequest) } } catch (e: Exception) { logger.error(e) { "Failed to add Keycloak access token to request" } // do not throw exception, just proceed with the original request and let the request fail // authorization downstream. - chain.proceed(chain.request()) + return chain.proceed(originalRequest) } + + return chain.proceed(builder.build()) + } } diff --git a/airbyte-api/commons/src/main/openapi/cloud-config.yaml b/airbyte-api/commons/src/main/openapi/cloud-config.yaml index fa606421ad1..a780c952e84 100644 --- a/airbyte-api/commons/src/main/openapi/cloud-config.yaml +++ b/airbyte-api/commons/src/main/openapi/cloud-config.yaml @@ -338,29 +338,7 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" - /v1/cloud_workspaces/get_usage: - post: - tags: - - cloud_workspace - summary: Get usage for a workspace. - operationId: getCloudWorkspaceUsage - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/CloudWorkspaceUsageRequestBody" - required: true - responses: - "200": - description: Successful operation - content: - application/json: - schema: - $ref: "#/components/schemas/CloudWorkspaceUsageRead" - "404": - $ref: "#/components/responses/NotFoundResponse" - "422": - $ref: "#/components/responses/InvalidInputResponse" + # permissions /v1/permissions/create: post: @@ -448,52 +426,7 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" - # STRIPE - /v1/stripe/create_checkout_session: - post: - tags: - - stripe - summary: Creates a Stripe Checkout session for the user and workspace, returning the Stripe URL that the user should be redirected to. 
- operationId: createStripeCheckoutSession - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/StripeCheckoutSessionCreate" - required: true - responses: - "200": - description: Successful operation - content: - application/json: - schema: - $ref: "#/components/schemas/StripeCheckoutSessionRead" - "404": - $ref: "#/components/responses/NotFoundResponse" - "422": - $ref: "#/components/responses/InvalidInputResponse" - /v1/stripe/complete_checkout_session: - post: - tags: - - stripe - summary: Handle Stripe Checkout Completed event by adding credits to customer in Orb - operationId: stripeCheckoutSessionCompleted - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/StripeCheckoutSessionCompletedEvent" - required: true - responses: - "200": - description: Successful operation - "400": - description: Invalid payload - "401": - description: Invalid signature - # This route is unsecured as it accepts webhook events from Stripe. - # Validation of these events happens in the Cloud Server's ExternalRequestAuthFilter. - security: [] + # WEB_BACKEND /v1/web_backend/users/revoke_user_session: post: @@ -720,10 +653,6 @@ components: properties: authUserId: type: string - UserPaymentAccountId: - type: string - description: UserPaymentAccount ID from the Cloud database - format: uuid UserRead: type: object required: @@ -878,10 +807,6 @@ components: type: string description: Workspace Id from OSS Airbyte instance format: uuid - Credits: - type: integer - description: Number of credits bought by the customer - format: int64 WorkspaceIdRequestBody: type: object required: @@ -889,16 +814,6 @@ components: properties: workspaceId: $ref: "#/components/schemas/WorkspaceId" - WorkspaceCreditsRequestBody: - type: object - required: - - workspaceId - - credits - properties: - workspaceId: - $ref: "#/components/schemas/WorkspaceId" - credits: - $ref: "#/components/schemas/Credits" ListResourcesForWorkspacesRequestBody: type: object required: @@ -958,105 +873,7 @@ components: type: boolean hasNonEligibleConnections: type: boolean - StripeCheckoutSessionCreate: - type: object - description: Input for creating a Stripe Checkout Session for an Airbyte workspace - required: - - workspaceId - - successUrl - - cancelUrl - - quantity - - stripeMode - properties: - workspaceId: - $ref: "#/components/schemas/WorkspaceId" - successUrl: - description: The Airbyte URL that the user should be redirected to upon payment success - type: string - cancelUrl: - description: The Airbyte URL that the user should be redirected to upon payment cancellation - type: string - quantity: - description: For payment mode only. Optional quantity of credits to use in the Stripe Checkout Session (can still be adjusted by the user). If unspecified, a default quantity will be set - $ref: "#/components/schemas/Credits" - stripeMode: - # TODO make this a required property after front-end has a chance to set it in the existing payment flow. openapi doesn't - # support default values when using a $ref so the handler code needs to impose the default behavior when this isn't set - description: The mode that the Checkout Session should be created in. - $ref: "#/components/schemas/StripeMode" - StripeMode: - # See Stripe API docs for more info https://stripe.com/docs/api/checkout/sessions/create#create_checkout_session-mode - description: Stripe's 'mode' param for the Stripe Checkout Session API. 
If unspecified, default to 'payment' - type: string - enum: - - payment - - setup - StripeCheckoutSessionRead: - description: Output of creating a Stripe Checkout Session, containing the session URL - type: object - required: - - stripeUrl - properties: - stripeUrl: - description: The URL of the Stripe Checkout Session created for this purchase, which the user should be redirected to - type: string - StripeCheckoutSessionCompletedEvent: - description: Webhook event coming from Stripe for the checkout.session.completed event - type: object - required: - - id - - type - - data - properties: - id: - description: ID of the Stripe webhook event - type: string - created: - description: The creation time of the Stripe webhook event in unix time seconds - type: integer - type: - description: The type of the webhook event. This spec only allows the following value, as this endpoint only processes Checkout Session Completed events - type: string - enum: - - checkout.session.completed - data: - description: The data payload of the webhook event. Redundantly, the actual Checkout Session object is further nested inside the 'object' property of this 'data' field. Since this comes directly from Stripe, we do not have control over this structure - type: object - required: - - object - properties: - object: - $ref: "#/components/schemas/StripeCheckoutSessionCompletedObject" - StripeCheckoutSessionCompletedObject: - description: The actual Checkout Session object in the webhook event. This has been pared down to contain only the necessary event properties for this API to function - type: object - required: - - id - - mode - - customer - properties: - id: - description: ID of the Checkout Session object in Stripe - type: string - mode: - description: The mode of the completed Checkout Session - $ref: "#/components/schemas/StripeMode" - customer: - description: The ID of the customer for the Session. Airbyte refers to this as the 'Stripe Customer ID' - type: string - invoice: - description: The Stripe Invoice ID associated with the completed session, if one exists. - type: string - payment_intent: - description: The Stripe Payment Intent ID associated with the completed session, if one exists. - type: string - metadata: - description: Metadata that Airbyte populates on the Checkout Session. At the moment, this only contains the Airbyte workspace ID - additionalProperties: - maxLength: 500 - type: string - nullable: true - type: object + # CLOUD_WORKSPACE extends Airbyte OSS WORKSPACE CloudWorkspaceCreate: type: object @@ -1105,40 +922,6 @@ components: If not set or false, the configuration is active. If true, then this configuration is permanently off. type: boolean - remainingCredits: - type: number - format: double - creditStatus: - description: - Status of the workspace's credit balance, to determine if sync jobs - can be launched for this workspace. - type: string - enum: - - positive - - negative_within_grace_period - - negative_beyond_grace_period - - negative_max_threshold - lastCreditPurchaseIncrementTimestamp: - description: The timestamp of the last time that credits were incremented for this workspace as a result of a credit purchase. Format is in unix epoch time milliseconds. 
- type: integer - format: int64 - workspaceTrialStatus: - description: Status of the workspace trial, depending on trial expiry date and credit purchase history - type: string - enum: - - pre_trial - - in_trial - - out_of_trial - - credit_purchased - trialExpiryTimestamp: - description: The timestamp of when the workspace trial's expires. Format is in unix epoch time milliseconds. Unset if workspace is out of trial. - type: integer - format: int64 - userPaymentAccountId: - $ref: "#/components/schemas/UserPaymentAccountId" - enrolledInFreeConnectorProgram: - type: boolean - description: Whether the workspace is enrolled in the Free Connector Program CloudWorkspaceLightRead: type: object required: @@ -1200,165 +983,6 @@ components: - archived - none - ConnectionProto: - type: object - required: - - connectionId - - connectionName - - status - - sourceId - - sourceDefinitionId - - sourceDefinitionName - - sourceConnectionName - - sourceIcon - - sourceReleaseStage - - sourceSupportLevel - - sourceCustom - - destinationId - - destinationDefinitionId - - destinationDefinitionName - - destinationConnectionName - - destinationIcon - - destinationReleaseStage - - destinationSupportLevel - - destinationCustom - properties: - connectionId: - type: string - format: uuid - connectionName: - type: string - status: - type: string - enum: - - active - - inactive - - deprecated - creditsConsumed: - description: Number of credits consumed rounded to two decimal places. - type: number - format: double - sourceId: - type: string - format: uuid - sourceDefinitionId: - type: string - format: uuid - sourceDefinitionName: - description: Name of the type of source - type: string - sourceConnectionName: - description: Name of this particular source as used in a single sync connection - type: string - sourceIcon: - description: Icon of the source actor. - type: string - sourceCustom: - description: True if the source is custom - type: boolean - default: false - sourceSupportLevel: - description: Support Level of source actor. - $ref: "#/components/schemas/ActorSupportLevel" - sourceReleaseStage: - description: Release stage of source actor. - $ref: "#/components/schemas/ActorReleaseStage" - destinationId: - type: string - format: uuid - destinationDefinitionId: - type: string - format: uuid - destinationDefinitionName: - description: Name of the type of destination - type: string - destinationConnectionName: - description: Name of this particular destination as used in a single sync connection - type: string - destinationIcon: - description: Icon of the destination actor. - type: string - destinationCustom: - description: True if the destination is custom - type: boolean - default: false - destinationSupportLevel: - description: Support Level of destination actor. - $ref: "#/components/schemas/ActorSupportLevel" - destinationReleaseStage: - description: Release stage of destination actor. 
- $ref: "#/components/schemas/ActorReleaseStage" - connectionScheduleType: - type: string - connectionScheduleTimeUnit: - type: string - connectionScheduleUnits: - format: int64 - type: integer - - ConsumptionTimeWindow: - type: string - enum: - - lastMonth - - lastSixMonths - - lastYear - default: lastMonth - - CloudWorkspaceUsageRequestBody: - type: object - required: - - workspaceId - properties: - workspaceId: - $ref: "#/components/schemas/WorkspaceId" - timeWindow: - description: Time window to calculate workspace credit consumptions - $ref: "#/components/schemas/ConsumptionTimeWindow" - - ConsumptionRead: - type: object - required: - - startTime - - endTime - - billedCost - - freeUsage - - connection - properties: - startTime: - description: start date of the timeframe, format "yyyy-mm-dd" - type: string - endTime: - description: end date of the timeframe, format "yyyy-mm-dd" - type: string - timeframe: - type: string - billedCost: - description: Billed costs consumed. - type: number - format: double - freeUsage: - description: Free usages consumed. - type: number - format: double - connection: - $ref: "#/components/schemas/ConnectionProto" - - CloudWorkspaceUsageRead: - type: object - required: - - workspaceId - - timeWindow - - consumptionPerConnectionPerTimeframe - properties: - workspaceId: - $ref: "#/components/schemas/WorkspaceId" - timeWindow: - $ref: "#/components/schemas/ConsumptionTimeWindow" - consumptionPerConnectionPerTimeframe: - type: array - items: - $ref: "#/components/schemas/ConsumptionRead" - # PERMISSIONS PermissionIdRequestBody: type: object diff --git a/airbyte-api/commons/src/test/kotlin/io/airbyte/api/client/auth/KeycloakAccessTokenInterceptorTest.kt b/airbyte-api/commons/src/test/kotlin/io/airbyte/api/client/auth/KeycloakAccessTokenInterceptorTest.kt index 728adad7f13..9fd188a4e62 100644 --- a/airbyte-api/commons/src/test/kotlin/io/airbyte/api/client/auth/KeycloakAccessTokenInterceptorTest.kt +++ b/airbyte-api/commons/src/test/kotlin/io/airbyte/api/client/auth/KeycloakAccessTokenInterceptorTest.kt @@ -1,6 +1,7 @@ package io.airbyte.api.client.auth import io.micronaut.http.HttpHeaders +import io.micronaut.http.client.exceptions.ResponseClosedException import io.micronaut.security.oauth2.client.clientcredentials.ClientCredentialsClient import io.micronaut.security.oauth2.endpoint.token.response.TokenResponse import io.mockk.every @@ -34,6 +35,46 @@ class KeycloakAccessTokenInterceptorTest { fun `test intercept when clientCredentialsClient fails to return token`() { every { clientCredentialsClient.requestToken() } returns Mono.error(RuntimeException("Failed to get token")) every { chain.request() } returns request + every { request.newBuilder() } returns Request.Builder().url("http://localhost") + every { chain.proceed(request) } returns response + + val result = interceptor.intercept(chain) + + assertEquals(response, result) + verify { chain.proceed(request) } + } + + @Test + fun `test intercept when clientCredentialsClient has intermittent HTTP error`() { + val tokenResponse = mockk() + every { tokenResponse.accessToken } returns "valid-token" + + every { clientCredentialsClient.requestToken() } returnsMany + listOf( + Mono.error(ResponseClosedException("HTTP error")), + Mono.just(tokenResponse), + ) + every { chain.request() } returns request + every { request.newBuilder() } returns Request.Builder().url("http://localhost") + every { chain.proceed(any()) } returns response + + val result = interceptor.intercept(chain) + assertEquals(response, result) + 
verify { + chain.proceed( + withArg { + val header = it.header(HttpHeaders.AUTHORIZATION) + assertEquals("Bearer valid-token", header) + }, + ) + } + } + + @Test + fun `test intercept when clientCredentialsClient returns persistent HTTP error`() { + every { clientCredentialsClient.requestToken() } returns Mono.error(ResponseClosedException("HTTP error")) + every { chain.request() } returns request + every { request.newBuilder() } returns Request.Builder().url("http://localhost") every { chain.proceed(request) } returns response val result = interceptor.intercept(chain) diff --git a/airbyte-api/connector-builder-api/build.gradle.kts b/airbyte-api/connector-builder-api/build.gradle.kts index 5f57eeae30c..cf8e6747b67 100644 --- a/airbyte-api/connector-builder-api/build.gradle.kts +++ b/airbyte-api/connector-builder-api/build.gradle.kts @@ -137,9 +137,9 @@ private fun updateDomainClientsWithFailsafe(clientPath: String) { // replace class declaration domainClientFileText = domainClientFileText.replace( - "class (\\S+)\\(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient\\) : ApiClient\\(basePath, client\\)" + "class (\\S+)\\(basePath: kotlin.String = defaultBasePath, client: Call.Factory = ApiClient.defaultClient\\) : ApiClient\\(basePath, client\\)" .toRegex(), - "class $1(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient, policy : RetryPolicy = RetryPolicy.ofDefaults()) : ApiClient(basePath, client, policy)", + "class $1(basePath: kotlin.String = defaultBasePath, client: Call.Factory = ApiClient.defaultClient, policy : RetryPolicy = RetryPolicy.ofDefaults()) : ApiClient(basePath, client, policy)", ) // add imports if not exist diff --git a/airbyte-api/problems-api/src/main/openapi/api-problems.yaml b/airbyte-api/problems-api/src/main/openapi/api-problems.yaml index cafa140517b..b2cd9e89f59 100644 --- a/airbyte-api/problems-api/src/main/openapi/api-problems.yaml +++ b/airbyte-api/problems-api/src/main/openapi/api-problems.yaml @@ -482,6 +482,27 @@ components: default: The specified cron timezone is invalid data: $ref: "#/components/schemas/ProblemCronTimezoneData" + CronValidationUnderOneHourNotAllowedProblemResponse: + x-implements: io.airbyte.api.problems.ProblemResponse + type: object + allOf: + - $ref: "#/components/schemas/BaseProblemFields" + - type: object + properties: + status: + type: integer + default: 400 + type: + type: string + default: error:cron-validation/under-one-hour-not-allowed + title: + type: string + default: Cron sync schedules more frequent than once per hour are not allowed + detail: + type: string + default: Syncs + data: + $ref: "#/components/schemas/ProblemCronExpressionData" MapperValidationProblemResponse: x-implements: io.airbyte.api.problems.ProblemResponse type: object @@ -503,6 +524,47 @@ components: default: The mapper configuration is invalid data: $ref: "#/components/schemas/ProblemMapperErrorsData" + MapperSecretNotFoundProblemResponse: + x-implements: io.airbyte.api.problems.ProblemResponse + type: object + allOf: + - $ref: "#/components/schemas/BaseProblemFields" + - type: object + properties: + status: + type: integer + default: 400 + type: + type: string + default: error:mapper-validation/secret-not-found + title: + type: string + default: Mapper secret not found + detail: + type: string + default: Attempted to use pre-existing secret values, but an existing secret for that mapper was not found. Please provide the secret values. 
+ data: + $ref: "#/components/schemas/ProblemMapperIdData" + + RuntimeSecretsManagerRequiredProblemResponse: + x-implements: io.airbyte.api.problems.ProblemResponse + type: object + allOf: + - $ref: "#/components/schemas/BaseProblemFields" + - type: object + properties: + status: + type: integer + default: 409 + type: + type: string + default: error:mapper-validation/runtime-secrets-manager-required + title: + type: string + default: Runtime Secrets Manager Required + detail: + type: string + default: The requested operation requires a runtime secrets manager to be configured. ServiceUnavailableProblemResponse: x-implements: io.airbyte.api.problems.ProblemResponse type: object @@ -628,7 +690,7 @@ components: title: type: string default: A subscription is required for this operation to succeed. - BillingTooManyNonPrepaidSubscriptionsProblemResponse: + BillingInsufficientPaymentStatusProblemResponse: x-implements: io.airbyte.api.problems.ProblemResponse type: object allOf: @@ -637,14 +699,16 @@ components: properties: status: type: integer - default: 400 + default: 422 type: type: string - default: error:billing/subscription/too-many-incompatible-subscriptions + default: error:billing/insufficient-payment-status title: type: string - default: Found more than one subscription with some not being prepaid. - BillingInsufficientPaymentStatusProblemResponse: + default: The payment status of the associated Organization is insufficient. + data: + $ref: "#/components/schemas/BillingInsufficientPaymentStatusProblemData" + BillingInsufficientCreditBalanceProblemResponse: x-implements: io.airbyte.api.problems.ProblemResponse type: object allOf: @@ -656,13 +720,13 @@ components: default: 422 type: type: string - default: error:billing/insufficient-payment-status + default: error:billing/insufficient-credit-balance title: type: string - default: The payment status of the associated Organization is insufficient. + default: The credit balance of the associated Workspace or Organization is insufficient. data: - $ref: "#/components/schemas/BillingInsufficientPaymentStatusProblemData" - BillingInsufficientCreditBalanceProblemResponse: + $ref: "#/components/schemas/BillingInsufficientCreditBalanceProblemData" + BillingNoActiveSubscriptionProblemResponse: x-implements: io.airbyte.api.problems.ProblemResponse type: object allOf: @@ -671,15 +735,13 @@ components: properties: status: type: integer - default: 422 + default: 404 type: type: string - default: error:billing/insufficient-credit-balance + default: error:billing/no-active-subscription title: type: string - default: The credit balance of the associated Workspace or Organization is insufficient. - data: - $ref: "#/components/schemas/BillingInsufficientCreditBalanceProblemData" + default: The organization doesn't have an active subscription. 
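The problem responses added above (CronValidationUnderOneHourNotAllowedProblemResponse, MapperSecretNotFoundProblemResponse, RuntimeSecretsManagerRequiredProblemResponse, and the reworked billing problems) all compose BaseProblemFields — status, type, title, detail — plus an optional typed data payload. For orientation, the standalone Kotlin sketch below models the wire shape a client might see for the new mapper-secret problem; the class names, hand-written data classes, and the sample "encryption" mapper type are illustrative assumptions, not the code generated from api-problems.yaml.

// Illustrative stand-ins for the JSON shape described by the schemas above;
// these are not the generated problem classes.
data class ProblemMapperIdData(
    val mapperId: String? = null, // uuid, optional in the schema
    val mapperType: String,       // required in the schema
)

data class MapperSecretNotFoundProblem(
    val status: Int = 400,
    val type: String = "error:mapper-validation/secret-not-found",
    val title: String = "Mapper secret not found",
    val detail: String =
        "Attempted to use pre-existing secret values, but an existing secret " +
            "for that mapper was not found. Please provide the secret values.",
    val data: ProblemMapperIdData,
)

fun main() {
    // Example 400 payload for a mapper whose secret is missing; "encryption" is
    // just a sample value (it is one of the mapper types this diff introduces).
    val problem = MapperSecretNotFoundProblem(data = ProblemMapperIdData(mapperType = "encryption"))
    println(problem)
}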
ConnectorRolloutInvalidRequestProblemResponse: x-implements: io.airbyte.api.problems.ProblemResponse type: object @@ -750,6 +812,16 @@ components: type: string cronTimezone: type: string + ProblemMapperIdData: + type: object + required: + - mapperType + properties: + mapperId: + type: string + format: uuid + mapperType: + type: string ProblemMapperErrorsData: type: object required: @@ -776,6 +848,9 @@ components: - type - mapperConfiguration properties: + id: + type: string + format: uuid type: type: string mapperConfiguration: diff --git a/airbyte-api/server-api/build.gradle.kts b/airbyte-api/server-api/build.gradle.kts index fb103a3bb42..1fd6625cd4e 100644 --- a/airbyte-api/server-api/build.gradle.kts +++ b/airbyte-api/server-api/build.gradle.kts @@ -4,6 +4,23 @@ plugins { id("io.airbyte.gradle.jvm.lib") } +airbyte { + spotless { + excludes = + listOf( + "src/main/openapi/api.yaml", + "src/main/openapi/api_sdk.yaml", + "src/main/openapi/api_terraform.yaml", + "src/main/openapi/api_documentation_connections.yaml", + "src/main/openapi/api_documentation_sources.yaml", + "src/main/openapi/api_documentation_destinations.yaml", + "src/main/openapi/api_documentation_streams.yaml", + "src/main/openapi/api_documentation_jobs.yaml", + "src/main/openapi/api_documentation_workspaces.yaml", + ) + } +} + dependencies { annotationProcessor(libs.micronaut.openapi) @@ -295,8 +312,8 @@ private fun updateApiClientWithFailsafe(clientPath: String) { .readText() // replace class declaration .replace( - "open class ApiClient(val baseUrl: String, val client: OkHttpClient = defaultClient) {", - "open class ApiClient(val baseUrl: String, val client: OkHttpClient = defaultClient, val policy : RetryPolicy = RetryPolicy.ofDefaults()) {", + "open class ApiClient(val baseUrl: String, val client: Call.Factory = defaultClient) {", + "open class ApiClient(val baseUrl: String, val client: Call.Factory = defaultClient, val policy : RetryPolicy = RetryPolicy.ofDefaults()) {", ) // replace execute call .replace( @@ -327,9 +344,9 @@ private fun updateDomainClientsWithFailsafe(clientPath: String) { // replace class declaration domainClientFileText = domainClientFileText.replace( - "class (\\S+)\\(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient\\) : ApiClient\\(basePath, client\\)" + "class (\\S+)\\(basePath: kotlin.String = defaultBasePath, client: Call.Factory = ApiClient.defaultClient\\) : ApiClient\\(basePath, client\\)" .toRegex(), - "class $1(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient, policy : RetryPolicy = RetryPolicy.ofDefaults()) : ApiClient(basePath, client, policy)", + "class $1(basePath: kotlin.String = defaultBasePath, client: Call.Factory = ApiClient.defaultClient, policy : RetryPolicy = RetryPolicy.ofDefaults()) : ApiClient(basePath, client, policy)", ) // add imports if not exist diff --git a/airbyte-api/server-api/src/main/kotlin/io/airbyte/api/client/AirbyteApiClient.kt b/airbyte-api/server-api/src/main/kotlin/io/airbyte/api/client/AirbyteApiClient.kt index e9e4a99ea96..48a232d169e 100644 --- a/airbyte-api/server-api/src/main/kotlin/io/airbyte/api/client/AirbyteApiClient.kt +++ b/airbyte-api/server-api/src/main/kotlin/io/airbyte/api/client/AirbyteApiClient.kt @@ -10,6 +10,7 @@ import io.airbyte.api.client.generated.AttemptApi import io.airbyte.api.client.generated.ConnectionApi import io.airbyte.api.client.generated.ConnectorBuilderProjectApi import io.airbyte.api.client.generated.ConnectorRolloutApi +import 
io.airbyte.api.client.generated.DataplaneApi import io.airbyte.api.client.generated.DeploymentMetadataApi import io.airbyte.api.client.generated.DestinationApi import io.airbyte.api.client.generated.DestinationDefinitionApi @@ -71,6 +72,7 @@ class AirbyteApiClient( val connectionApi = ConnectionApi(basePath = basePath, client = httpClient, policy = policy) val connectorBuilderProjectApi = ConnectorBuilderProjectApi(basePath = basePath, client = httpClient, policy = policy) val connectorRolloutApi = ConnectorRolloutApi(basePath = basePath, client = httpClient, policy = policy) + val dataplaneApi = DataplaneApi(basePath = basePath, client = httpClient, policy = policy) val deploymentMetadataApi = DeploymentMetadataApi(basePath = basePath, client = httpClient, policy = policy) val destinationApi = DestinationApi(basePath = basePath, client = httpClient, policy = policy) val destinationDefinitionApi = DestinationDefinitionApi(basePath = basePath, client = httpClient, policy = policy) diff --git a/airbyte-api/server-api/src/main/openapi/api.yaml b/airbyte-api/server-api/src/main/openapi/api.yaml index e10f000587b..ab9505ca527 100644 --- a/airbyte-api/server-api/src/main/openapi/api.yaml +++ b/airbyte-api/server-api/src/main/openapi/api.yaml @@ -2,7 +2,7 @@ openapi: 3.1.0 info: title: airbyte-api version: 1.0.0 - description: Programatically control Airbyte Cloud, OSS & Enterprise. + description: Programmatically control Airbyte Cloud, OSS & Enterprise. servers: - url: https://api.airbyte.com/v1 description: Airbyte API v1 @@ -1805,6 +1805,7 @@ components: - hashing - field-renaming - row-filtering + - encryption x-speakeasy-component: true ConfiguredStreamMapper: type: object @@ -2236,6 +2237,7 @@ components: - schedule - dataResidency - configurations + - createdAt properties: connectionId: format: UUID @@ -2267,6 +2269,9 @@ components: type: string configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: int64 + type: integer x-speakeasy-entity: Connection x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -2384,6 +2389,7 @@ components: - definitionId - workspaceId - configuration + - createdAt properties: destinationId: format: UUID @@ -2400,6 +2406,9 @@ components: type: string configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: int64 + type: integer example: destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 name: Analytics Team Postgres @@ -2418,6 +2427,7 @@ components: - workspaceId - configuration - definitionId + - createdAt properties: sourceId: format: UUID @@ -2434,6 +2444,9 @@ components: type: string configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: int64 + type: integer example: sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 name: Analytics Team Postgres diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_applications.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_applications.yaml index eca2e9d5bfe..8696bc48897 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_applications.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_applications.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Applications" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -269,6 +269,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -628,6 +629,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -659,6 +661,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -776,6 +781,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -792,6 +798,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -810,6 +819,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -826,6 +836,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_connections.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_connections.yaml index 600575eb4fd..4b36bb76387 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_connections.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_connections.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Connections" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -292,6 +292,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -651,6 +652,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -682,6 +684,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -799,6 +804,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -815,6 +821,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -833,6 +842,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -849,6 +859,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_destinations.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_destinations.yaml index 84a03eb8588..80bde8489bd 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_destinations.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_destinations.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Destinations" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -361,6 +361,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -720,6 +721,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -751,6 +753,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -868,6 +873,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -884,6 +890,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -902,6 +911,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -918,6 +928,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -14867,29 +14880,34 @@ components: - "destinationType" properties: motherduck_api_key: - title: "MotherDuck API Key" + title: "MotherDuck Access Token" type: "string" - description: "API key to use for authentication to a MotherDuck database." + description: + "API access token to use for authentication to a MotherDuck\ + \ database." airbyte_secret: true x-speakeasy-param-sensitive: true destination_path: title: "Destination DB" type: "string" description: - "Path to the .duckdb file, or the text 'md:' to connect to\ - \ MotherDuck. The file will be placed inside that local mount. For more\ - \ information check out our docs" + "Path to a .duckdb file or 'md:' to connect\ + \ to a MotherDuck database. If 'md:' is specified without a database name,\ + \ the default MotherDuck database name ('my_db') will be used." examples: - "/local/destination.duckdb" - "md:" - - "motherduck:" + - "md:data_db" + - "md:my_db" default: "md:" schema: - title: "Destination Schema" + title: "Schema Name" type: "string" - description: "Database schema name, default for duckdb is 'main'." - example: "main" + description: "Database schema name, defaults to 'main' if not specified." + examples: + - "main" + - "airbyte_raw" + - "my_schema" destinationType: title: "motherduck" const: "motherduck" @@ -14904,28 +14922,33 @@ components: - "motherduck_api_key" properties: motherduck_api_key: - title: "MotherDuck API Key" + title: "MotherDuck Access Token" type: "string" - description: "API key to use for authentication to a MotherDuck database." + description: + "API access token to use for authentication to a MotherDuck\ + \ database." airbyte_secret: true destination_path: title: "Destination DB" type: "string" description: - "Path to the .duckdb file, or the text 'md:' to connect to\ - \ MotherDuck. The file will be placed inside that local mount. For more\ - \ information check out our docs" + "Path to a .duckdb file or 'md:' to connect\ + \ to a MotherDuck database. If 'md:' is specified without a database name,\ + \ the default MotherDuck database name ('my_db') will be used." 
examples: - "/local/destination.duckdb" - "md:" - - "motherduck:" + - "md:data_db" + - "md:my_db" default: "md:" schema: - title: "Destination Schema" + title: "Schema Name" type: "string" - description: "Database schema name, default for duckdb is 'main'." - example: "main" + description: "Database schema name, defaults to 'main' if not specified." + examples: + - "main" + - "airbyte_raw" + - "my_schema" destination-s3: title: "S3 Destination Spec" type: "object" diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_jobs.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_jobs.yaml index 081b3b3b05e..6348a36a7b0 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_jobs.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_jobs.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Jobs" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -312,6 +312,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -671,6 +672,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -702,6 +704,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -819,6 +824,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -835,6 +841,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -853,6 +862,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -869,6 +879,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_organizations.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_organizations.yaml index f605998dfab..6a89cc5de87 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_organizations.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_organizations.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Organizations" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -122,6 +122,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -481,6 +482,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -512,6 +514,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -629,6 +634,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -645,6 +651,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -663,6 +672,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -679,6 +689,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_permissions.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_permissions.yaml index e5cb83254be..155844fbe15 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_permissions.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_permissions.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Permissions" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -258,6 +258,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -617,6 +618,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -648,6 +650,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -765,6 +770,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -781,6 +787,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -799,6 +808,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -815,6 +825,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_sources.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_sources.yaml index d263fbf8ef8..ab6a841b5e8 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_sources.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_sources.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Sources" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -375,6 +375,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -734,6 +735,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -765,6 +767,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -882,6 +887,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -898,6 +904,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -916,6 +925,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -932,6 +942,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -1468,6 +1481,71 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-google-forms: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "form_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + form_id: + type: "array" + order: 3 + title: "Form IDs" + sourceType: + title: "google-forms" + const: "google-forms" + enum: + - "google-forms" + order: 0 + type: "string" + source-google-forms-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "form_id" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + form_id: + type: "array" + order: 3 + title: "Form IDs" source-the-guardian-api: title: "The Guardian Api Spec" type: "object" @@ -1866,6 +1944,65 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-google-classroom: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + title: "OAuth Client ID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + title: "OAuth Client Secret" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: 
"google-classroom" + const: "google-classroom" + enum: + - "google-classroom" + order: 0 + type: "string" + source-google-classroom-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + properties: + client_id: + type: "string" + name: "client_id" + title: "OAuth Client ID" + airbyte_secret: true + order: 0 + client_secret: + type: "string" + name: "client_secret" + title: "OAuth Client Secret" + airbyte_secret: true + order: 1 + client_refresh_token: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 2 source-klaviyo: title: "Klaviyo Spec" type: "object" @@ -1902,6 +2039,23 @@ components: \ the \"predictive_analytics\" column from being populated in your downstream\ \ destination." order: 2 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 3 sourceType: title: "klaviyo" const: "klaviyo" @@ -1947,6 +2101,23 @@ components: \ the \"predictive_analytics\" column from being populated in your downstream\ \ destination." order: 2 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 3 required: - "api_key" source-datadog: @@ -2240,7 +2411,6 @@ components: title: "API Key" airbyte_secret: true source-quickbooks: - title: "Source QuickBooks Spec" type: "object" required: - "credentials" @@ -2249,9 +2419,7 @@ components: - "sourceType" properties: credentials: - title: "Authorization Method" type: "object" - order: 0 oneOf: - type: "object" title: "OAuth2.0" @@ -2263,6 +2431,14 @@ components: - "token_expiry_date" - "realm_id" properties: + realm_id: + type: "string" + title: "Realm ID" + description: + "Labeled Company ID. The Make API Calls panel is populated\ + \ with the realm id and the current access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true auth_type: type: "string" const: "oauth2.0" @@ -2276,58 +2452,52 @@ components: \ value from the Keys tab on the app profile via My Apps on the\ \ developer site. There are two versions of this key: development\ \ and production." + access_token: + type: "string" + title: "Access Token" + description: "Access token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true client_secret: + type: "string" + title: "Client Secret" description: " Obtain this value from the Keys tab on the app profile\ \ via My Apps on the developer site. There are two versions of this\ \ key: development and production." - title: "Client Secret" - type: "string" airbyte_secret: true x-speakeasy-param-sensitive: true refresh_token: - description: "A token used when refreshing the access token." - title: "Refresh Token" - type: "string" - airbyte_secret: true - x-speakeasy-param-sensitive: true - access_token: - description: "Access token for making authenticated requests." 
- title: "Access Token" type: "string" + title: "Refresh Token" + description: "A token used when refreshing the access token." airbyte_secret: true x-speakeasy-param-sensitive: true token_expiry_date: type: "string" title: "Token Expiry Date" - description: "The date-time when the access token should be refreshed." format: "date-time" - realm_id: - description: - "Labeled Company ID. The Make API Calls panel is populated\ - \ with the realm id and the current access token." - title: "Realm ID" - type: "string" - airbyte_secret: true - x-speakeasy-param-sensitive: true + description: "The date-time when the access token should be refreshed." + order: 0 + title: "Authorization Method" start_date: + type: "string" order: 1 - description: - "The default value to use if no bookmark exists for an endpoint\ - \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ - \ date will not be replicated." title: "Start Date" - type: "string" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-03-20T00:00:00Z" + description: + "The default value to use if no bookmark exists for an endpoint\ + \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ + \ date will not be replicated." sandbox: + type: "boolean" order: 2 - description: "Determines whether to use the sandbox or production environment." title: "Sandbox" - type: "boolean" default: false + description: "Determines whether to use the sandbox or production environment." sourceType: title: "quickbooks" const: "quickbooks" @@ -2336,7 +2506,6 @@ components: order: 0 type: "string" source-quickbooks-update: - title: "Source QuickBooks Spec" type: "object" required: - "credentials" @@ -2344,9 +2513,7 @@ components: - "sandbox" properties: credentials: - title: "Authorization Method" type: "object" - order: 0 oneOf: - type: "object" title: "OAuth2.0" @@ -2358,6 +2525,13 @@ components: - "token_expiry_date" - "realm_id" properties: + realm_id: + type: "string" + title: "Realm ID" + description: + "Labeled Company ID. The Make API Calls panel is populated\ + \ with the realm id and the current access token." + airbyte_secret: true auth_type: type: "string" const: "oauth2.0" @@ -2371,54 +2545,94 @@ components: \ value from the Keys tab on the app profile via My Apps on the\ \ developer site. There are two versions of this key: development\ \ and production." + access_token: + type: "string" + title: "Access Token" + description: "Access token for making authenticated requests." + airbyte_secret: true client_secret: + type: "string" + title: "Client Secret" description: " Obtain this value from the Keys tab on the app profile\ \ via My Apps on the developer site. There are two versions of this\ \ key: development and production." - title: "Client Secret" - type: "string" airbyte_secret: true refresh_token: - description: "A token used when refreshing the access token." - title: "Refresh Token" - type: "string" - airbyte_secret: true - access_token: - description: "Access token for making authenticated requests." - title: "Access Token" type: "string" + title: "Refresh Token" + description: "A token used when refreshing the access token." airbyte_secret: true token_expiry_date: type: "string" title: "Token Expiry Date" - description: "The date-time when the access token should be refreshed." format: "date-time" - realm_id: - description: - "Labeled Company ID. The Make API Calls panel is populated\ - \ with the realm id and the current access token." 
- title: "Realm ID" - type: "string" - airbyte_secret: true + description: "The date-time when the access token should be refreshed." + order: 0 + title: "Authorization Method" start_date: + type: "string" order: 1 - description: - "The default value to use if no bookmark exists for an endpoint\ - \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ - \ date will not be replicated." title: "Start Date" - type: "string" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-03-20T00:00:00Z" + description: + "The default value to use if no bookmark exists for an endpoint\ + \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ + \ date will not be replicated." sandbox: + type: "boolean" order: 2 - description: "Determines whether to use the sandbox or production environment." title: "Sandbox" - type: "boolean" default: false + description: "Determines whether to use the sandbox or production environment." + source-flowlu: + type: "object" + required: + - "api_key" + - "company" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + company: + type: "string" + name: "company" + order: 1 + title: "Company" + sourceType: + title: "flowlu" + const: "flowlu" + enum: + - "flowlu" + order: 0 + type: "string" + source-flowlu-update: + type: "object" + required: + - "api_key" + - "company" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + company: + type: "string" + name: "company" + order: 1 + title: "Company" source-beamer: type: "object" required: @@ -3014,6 +3228,49 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-mailosaur: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + description: 'Enter "api" here' + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your api key here" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "mailosaur" + const: "mailosaur" + enum: + - "mailosaur" + order: 0 + type: "string" + source-mailosaur-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + description: 'Enter "api" here' + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your api key here" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true source-buildkite: type: "object" required: @@ -3718,6 +3975,77 @@ components: examples: - "https://api.kaon.kyve.network/" - "https://api.korellia.kyve.network/" + source-capsule-crm: + type: "object" + required: + - "bearer_token" + - "start_date" + - "entity" + - "sourceType" + properties: + bearer_token: + type: "string" + description: + "Bearer token to authenticate API requests. Generate it from\ + \ the 'My Preferences' > 'API Authentication Tokens' page in your Capsule\ + \ account." 
+ name: "bearer_token" + order: 0 + title: "Bearer Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + entity: + type: "string" + title: "Entity" + enum: + - "parties" + - "opportunities" + - "kases" + order: 2 + sourceType: + title: "capsule-crm" + const: "capsule-crm" + enum: + - "capsule-crm" + order: 0 + type: "string" + source-capsule-crm-update: + type: "object" + required: + - "bearer_token" + - "start_date" + - "entity" + properties: + bearer_token: + type: "string" + description: + "Bearer token to authenticate API requests. Generate it from\ + \ the 'My Preferences' > 'API Authentication Tokens' page in your Capsule\ + \ account." + name: "bearer_token" + order: 0 + title: "Bearer Token" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + entity: + type: "string" + title: "Entity" + enum: + - "parties" + - "opportunities" + - "kases" + order: 2 source-sigma-computing: type: "object" required: @@ -4128,6 +4456,48 @@ components: default: 100000 minimum: 15000 maximum: 200000 + source-shippo: + type: "object" + required: + - "shippo_token" + - "start_date" + - "sourceType" + properties: + shippo_token: + type: "string" + description: "The bearer token used for making requests" + title: "Shippo Token" + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "shippo" + const: "shippo" + enum: + - "shippo" + order: 0 + type: "string" + source-shippo-update: + type: "object" + required: + - "shippo_token" + - "start_date" + properties: + shippo_token: + type: "string" + description: "The bearer token used for making requests" + title: "Shippo Token" + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 source-coda: type: "object" required: @@ -5573,7 +5943,6 @@ components: - "client_id" - "client_secret" - "tenant_id" - - "application_id_uri" - "user_id" - "sourceType" properties: @@ -5595,15 +5964,9 @@ components: title: "Tenant Id" airbyte_secret: true x-speakeasy-param-sensitive: true - application_id_uri: - type: "string" - order: 3 - title: "Application Id URI" - airbyte_secret: true - x-speakeasy-param-sensitive: true user_id: type: "string" - order: 4 + order: 3 title: "User Id" airbyte_secret: true x-speakeasy-param-sensitive: true @@ -5620,7 +5983,6 @@ components: - "client_id" - "client_secret" - "tenant_id" - - "application_id_uri" - "user_id" properties: client_id: @@ -5638,16 +6000,62 @@ components: order: 2 title: "Tenant Id" airbyte_secret: true - application_id_uri: + user_id: type: "string" order: 3 - title: "Application Id URI" + title: "User Id" airbyte_secret: true - user_id: + source-less-annoying-crm: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: type: "string" - order: 4 - title: "User Id" + description: + "API key to use. Manage and create your API keys on the Programmer\ + \ API settings page at https://account.lessannoyingcrm.com/app/Settings/Api." 
+ name: "api_key" + order: 0 + title: "API Key" airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "less-annoying-crm" + const: "less-annoying-crm" + enum: + - "less-annoying-crm" + order: 0 + type: "string" + source-less-annoying-crm-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Manage and create your API keys on the Programmer\ + \ API settings page at https://account.lessannoyingcrm.com/app/Settings/Api." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-planhat: type: "object" required: @@ -5683,6 +6091,39 @@ components: order: 0 title: "API Token" airbyte_secret: true + source-encharge: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "encharge" + const: "encharge" + enum: + - "encharge" + order: 0 + type: "string" + source-encharge-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-shortio: title: "Shortio Spec" type: "object" @@ -5779,6 +6220,146 @@ components: title: "Rest API Key" airbyte_secret: true description: "Instatus REST API key" + source-flexmail: + type: "object" + required: + - "account_id" + - "personal_access_token" + - "sourceType" + properties: + account_id: + type: "string" + description: + "Your Flexmail account ID. You can find it in your Flexmail\ + \ account settings." + name: "account_id" + order: 0 + title: "Account ID" + personal_access_token: + type: "string" + description: + "A personal access token for API authentication. Manage your\ + \ tokens in Flexmail under Settings > API > Personal access tokens." + name: "personal_access_token" + order: 1 + title: "Personal Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "flexmail" + const: "flexmail" + enum: + - "flexmail" + order: 0 + type: "string" + source-flexmail-update: + type: "object" + required: + - "account_id" + - "personal_access_token" + properties: + account_id: + type: "string" + description: + "Your Flexmail account ID. You can find it in your Flexmail\ + \ account settings." + name: "account_id" + order: 0 + title: "Account ID" + personal_access_token: + type: "string" + description: + "A personal access token for API authentication. Manage your\ + \ tokens in Flexmail under Settings > API > Personal access tokens." 
+ name: "personal_access_token" + order: 1 + title: "Personal Access Token" + airbyte_secret: true + source-openfda: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "openfda" + const: "openfda" + enum: + - "openfda" + order: 0 + type: "string" + source-openfda-update: + type: "object" + required: [] + properties: {} + source-elasticemail: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + scope_type: + type: "string" + enum: + - "Personal" + - "Global" + order: 1 + title: "scope type" + from: + type: "string" + order: 2 + title: "From" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "elasticemail" + const: "elasticemail" + enum: + - "elasticemail" + order: 0 + type: "string" + source-elasticemail-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + scope_type: + type: "string" + enum: + - "Personal" + - "Global" + order: 1 + title: "scope type" + from: + type: "string" + order: 2 + title: "From" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 source-vwo: type: "object" required: @@ -6709,6 +7290,39 @@ components: \ data before this date will not be replicated." type: "string" format: "date-time" + source-paperform: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "paperform" + const: "paperform" + enum: + - "paperform" + order: 0 + type: "string" + source-paperform-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-microsoft-sharepoint: title: "Microsoft SharePoint Source Spec" description: @@ -8118,6 +8732,35 @@ components: >here. The token is case sensitive." 
airbyte_secret: true order: 4 + source-tinyemail: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "tinyemail" + const: "tinyemail" + enum: + - "tinyemail" + order: 0 + type: "string" + source-tinyemail-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true source-lob: type: "object" required: @@ -8275,6 +8918,117 @@ components: description: "Start date" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" order: 2 + source-apptivo: + type: "object" + required: + - "api_key" + - "access_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your Apptivo account under Business\ + \ Settings -> API Access." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_key: + type: "string" + order: 1 + title: "Access Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "apptivo" + const: "apptivo" + enum: + - "apptivo" + order: 0 + type: "string" + source-apptivo-update: + type: "object" + required: + - "api_key" + - "access_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your Apptivo account under Business\ + \ Settings -> API Access." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + access_key: + type: "string" + order: 1 + title: "Access Key" + airbyte_secret: true + source-zonka-feedback: + type: "object" + required: + - "datacenter" + - "auth_token" + - "sourceType" + properties: + datacenter: + type: "string" + description: + "The identifier for the data center, such as 'us1' or 'e' for\ + \ EU." + enum: + - "us1" + - "e" + name: "dc_id" + order: 0 + title: "Data Center ID" + auth_token: + type: "string" + description: + "Auth token to use. Generate it by navigating to Company Settings\ + \ > Developers > API in your Zonka Feedback account." + name: "auth_token" + order: 1 + title: "Auth Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zonka-feedback" + const: "zonka-feedback" + enum: + - "zonka-feedback" + order: 0 + type: "string" + source-zonka-feedback-update: + type: "object" + required: + - "datacenter" + - "auth_token" + properties: + datacenter: + type: "string" + description: + "The identifier for the data center, such as 'us1' or 'e' for\ + \ EU." + enum: + - "us1" + - "e" + name: "dc_id" + order: 0 + title: "Data Center ID" + auth_token: + type: "string" + description: + "Auth token to use. Generate it by navigating to Company Settings\ + \ > Developers > API in your Zonka Feedback account." + name: "auth_token" + order: 1 + title: "Auth Token" + airbyte_secret: true source-orb: type: "object" required: @@ -9165,6 +9919,199 @@ components: type: "string" order: 2 title: "workspace" + source-fillout: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Developer settings tab of your\ + \ Fillout account." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "fillout" + const: "fillout" + enum: + - "fillout" + order: 0 + type: "string" + source-fillout-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Developer settings tab of your\ + \ Fillout account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-inflowinventory: + type: "object" + required: + - "api_key" + - "companyid" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + companyid: + type: "string" + order: 1 + title: "CompanyID" + sourceType: + title: "inflowinventory" + const: "inflowinventory" + enum: + - "inflowinventory" + order: 0 + type: "string" + source-inflowinventory-update: + type: "object" + required: + - "api_key" + - "companyid" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + companyid: + type: "string" + order: 1 + title: "CompanyID" + source-clockodo: + type: "object" + required: + - "api_key" + - "email_address" + - "external_application" + - "years" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the 'Personal data' section of\ + \ your Clockodo account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + email_address: + type: "string" + description: + "Your Clockodo account email address. Find it in your Clockodo\ + \ account settings." + name: "email_address" + order: 1 + title: "Email Address" + external_application: + type: "string" + description: + "Identification of the calling application, including the email\ + \ address of a technical contact person. Format: [name of application\ + \ or company];[email address]." + name: "external_application" + order: 2 + title: "External Application Header" + default: "Airbyte" + years: + type: "array" + description: "2024, 2025" + title: "Years" + order: 3 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 + sourceType: + title: "clockodo" + const: "clockodo" + enum: + - "clockodo" + order: 0 + type: "string" + source-clockodo-update: + type: "object" + required: + - "api_key" + - "email_address" + - "external_application" + - "years" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the 'Personal data' section of\ + \ your Clockodo account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + email_address: + type: "string" + description: + "Your Clockodo account email address. Find it in your Clockodo\ + \ account settings." + name: "email_address" + order: 1 + title: "Email Address" + external_application: + type: "string" + description: + "Identification of the calling application, including the email\ + \ address of a technical contact person. 
Format: [name of application\ + \ or company];[email address]." + name: "external_application" + order: 2 + title: "External Application Header" + default: "Airbyte" + years: + type: "array" + description: "2024, 2025" + title: "Years" + order: 3 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 source-google-webfonts: type: "object" required: @@ -10308,6 +11255,103 @@ components: title: "API Key" airbyte_secret: true order: 0 + source-zoho-analytics-metadata-api: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "data_center" + - "org_id" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 2 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - "com.cn" + - "jp" + order: 3 + title: "Data Center" + default: "com" + org_id: + type: "number" + order: 4 + title: "Org Id" + sourceType: + title: "zoho-analytics-metadata-api" + const: "zoho-analytics-metadata-api" + enum: + - "zoho-analytics-metadata-api" + order: 0 + type: "string" + source-zoho-analytics-metadata-api-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "data_center" + - "org_id" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 2 + title: "OAuth Refresh Token" + airbyte_secret: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - "com.cn" + - "jp" + order: 3 + title: "Data Center" + default: "com" + org_id: + type: "number" + order: 4 + title: "Org Id" source-buzzsprout: type: "object" required: @@ -10363,6 +11407,43 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 2 + source-rocketlane: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the API section in Settings\ + \ of your Rocketlane account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "rocketlane" + const: "rocketlane" + enum: + - "rocketlane" + order: 0 + type: "string" + source-rocketlane-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the API section in Settings\ + \ of your Rocketlane account." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-youtube-analytics: title: "YouTube Analytics Spec" type: "object" @@ -10438,6 +11519,35 @@ components: "A refresh token generated using the above client ID and\ \ secret" airbyte_secret: true + source-systeme: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "systeme" + const: "systeme" + enum: + - "systeme" + order: 0 + type: "string" + source-systeme-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true source-zendesk-talk: type: "object" title: "Source Zendesk Talk Spec" @@ -10677,6 +11787,76 @@ components: \ example.thinkific.com, your subdomain is \"example\"." order: 1 title: "subdomain" + source-papersign: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "papersign" + const: "papersign" + enum: + - "papersign" + order: 0 + type: "string" + source-papersign-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-eventzilla: + type: "object" + required: + - "x-api-key" + - "sourceType" + properties: + x-api-key: + type: "string" + description: + "API key to use. Generate it by creating a new application\ + \ within your Eventzilla account settings under Settings > App Management." + name: "x-api-key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "eventzilla" + const: "eventzilla" + enum: + - "eventzilla" + order: 0 + type: "string" + source-eventzilla-update: + type: "object" + required: + - "x-api-key" + properties: + x-api-key: + type: "string" + description: + "API key to use. Generate it by creating a new application\ + \ within your Eventzilla account settings under Settings > App Management." 
+ name: "x-api-key" + order: 0 + title: "API Key" + airbyte_secret: true source-plausible: type: "object" required: @@ -10940,6 +12120,85 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-zoho-campaign: + type: "object" + required: + - "client_id_2" + - "client_secret_2" + - "client_refresh_token" + - "data_center" + - "sourceType" + properties: + client_id_2: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret_2: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - ".jp" + - ".com.cn" + order: 3 + title: "Data Center" + sourceType: + title: "zoho-campaign" + const: "zoho-campaign" + enum: + - "zoho-campaign" + order: 0 + type: "string" + source-zoho-campaign-update: + type: "object" + required: + - "client_id_2" + - "client_secret_2" + - "client_refresh_token" + - "data_center" + properties: + client_id_2: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret_2: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - ".jp" + - ".com.cn" + order: 3 + title: "Data Center" source-oura: type: "object" required: @@ -10999,6 +12258,46 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" description: "End datetime to sync until. Default is current UTC datetime." + source-cin7: + type: "object" + required: + - "accountid" + - "api_key" + - "sourceType" + properties: + accountid: + type: "string" + description: "The ID associated with your account." + order: 0 + title: "Account ID" + api_key: + type: "string" + description: "The API key associated with your account." + order: 1 + title: "API Key" + sourceType: + title: "cin7" + const: "cin7" + enum: + - "cin7" + order: 0 + type: "string" + source-cin7-update: + type: "object" + required: + - "accountid" + - "api_key" + properties: + accountid: + type: "string" + description: "The ID associated with your account." + order: 0 + title: "Account ID" + api_key: + type: "string" + description: "The API key associated with your account." + order: 1 + title: "API Key" source-looker: type: "object" required: @@ -11135,6 +12434,55 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-campayn: + type: "object" + required: + - "sub_domain" + - "api_key" + - "sourceType" + properties: + sub_domain: + type: "string" + name: "domain" + title: "Sub Domain" + order: 0 + api_key: + type: "string" + description: + "API key to use. Find it in your Campayn account settings.\ + \ Keep it secure as it grants access to your Campayn data." 
+ name: "api_key" + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "campayn" + const: "campayn" + enum: + - "campayn" + order: 0 + type: "string" + source-campayn-update: + type: "object" + required: + - "sub_domain" + - "api_key" + properties: + sub_domain: + type: "string" + name: "domain" + title: "Sub Domain" + order: 0 + api_key: + type: "string" + description: + "API key to use. Find it in your Campayn account settings.\ + \ Keep it secure as it grants access to your Campayn data." + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 1 source-google-tasks: type: "object" required: @@ -12552,6 +13900,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 7 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." + required: + - "delivery_type" host: title: "Host Address" description: "The server host address" @@ -12606,7 +13997,9 @@ components: description: "The Private key" multiline: true order: 4 + airbyte_secret: true type: "string" + x-speakeasy-param-sensitive: true required: - "private_key" - "auth_type" @@ -13114,6 +14507,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 7 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." 
+ required: + - "delivery_type" host: title: "Host Address" description: "The server host address" @@ -13167,6 +14603,7 @@ components: description: "The Private key" multiline: true order: 4 + airbyte_secret: true type: "string" required: - "private_key" @@ -14541,6 +15978,57 @@ components: default: false order: 4 type: "boolean" + source-oncehub: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your OnceHub account under the\ + \ API & Webhooks Integration page." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "oncehub" + const: "oncehub" + enum: + - "oncehub" + order: 0 + type: "string" + source-oncehub-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your OnceHub account under the\ + \ API & Webhooks Integration page." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-aws-cloudtrail: title: "Aws CloudTrail Spec" type: "object" @@ -14762,6 +16250,18 @@ components: \ for more info." default: false order: 6 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: "The number of worker threads to use for the sync." + order: 7 sourceType: title: "jira" const: "jira" @@ -14854,6 +16354,18 @@ components: \ for more info." default: false order: 6 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: "The number of worker threads to use for the sync." + order: 7 source-smartwaiver: type: "object" required: @@ -14915,6 +16427,61 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 2 + source-bunny-inc: + type: "object" + required: + - "subdomain" + - "apikey" + - "sourceType" + properties: + apikey: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "The subdomain specific to your Bunny account or service." + name: "subdomain" + order: 0 + title: "Subdomain" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "bunny-inc" + const: "bunny-inc" + enum: + - "bunny-inc" + order: 0 + type: "string" + source-bunny-inc-update: + type: "object" + required: + - "subdomain" + - "apikey" + properties: + apikey: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "The subdomain specific to your Bunny account or service." 
+ name: "subdomain" + order: 0 + title: "Subdomain" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-hubspot: title: "HubSpot Source Spec" type: "object" @@ -15222,6 +16789,86 @@ components: \ pagination will begin with that number to end of available comics" default: "2960" order: 0 + source-jobnimbus: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it by logging into your JobNimbus account,\ + \ navigating to settings, and creating a new API key under the API section." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "jobnimbus" + const: "jobnimbus" + enum: + - "jobnimbus" + order: 0 + type: "string" + source-jobnimbus-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it by logging into your JobNimbus account,\ + \ navigating to settings, and creating a new API key under the API section." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-marketstack: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "marketstack" + const: "marketstack" + enum: + - "marketstack" + order: 0 + type: "string" + source-marketstack-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 source-zenloop: title: "Zenloop Spec" type: "object" @@ -15393,6 +17040,23 @@ components: - "2.0" default: "2.0" order: 3 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 4 sourceType: title: "chargebee" const: "chargebee" @@ -15447,6 +17111,23 @@ components: - "2.0" default: "2.0" order: 3 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." 
+ order: 4 source-wrike: type: "object" required: @@ -16481,6 +18162,45 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-firehydrant: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Bot token to use for authenticating with the FireHydrant API.\ + \ You can find or create a bot token by logging into your organization\ + \ and visiting the Bot users page at https://app.firehydrant.io/organizations/bots." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "firehydrant" + const: "firehydrant" + enum: + - "firehydrant" + order: 0 + type: "string" + source-firehydrant-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Bot token to use for authenticating with the FireHydrant API.\ + \ You can find or create a bot token by logging into your organization\ + \ and visiting the Bot users page at https://app.firehydrant.io/organizations/bots." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-concord: type: "object" required: @@ -16532,6 +18252,64 @@ components: name: "organizationId" order: 1 title: "Environment" + source-e-conomic: + type: "object" + required: + - "app_secret_token" + - "agreement_grant_token" + - "sourceType" + properties: + app_secret_token: + type: "string" + description: + "Your private token that identifies your app. Find it in your\ + \ e-conomic account settings." + name: "app_secret_token" + order: 0 + title: "App Secret Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + agreement_grant_token: + type: "string" + description: + "Token that identifies the grant issued by an agreement, allowing\ + \ your app to access data. Obtain it from your e-conomic account settings." + name: "agreement_grant_token" + order: 1 + title: "Agreement Grant Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "e-conomic" + const: "e-conomic" + enum: + - "e-conomic" + order: 0 + type: "string" + source-e-conomic-update: + type: "object" + required: + - "app_secret_token" + - "agreement_grant_token" + properties: + app_secret_token: + type: "string" + description: + "Your private token that identifies your app. Find it in your\ + \ e-conomic account settings." + name: "app_secret_token" + order: 0 + title: "App Secret Token" + airbyte_secret: true + agreement_grant_token: + type: "string" + description: + "Token that identifies the grant issued by an agreement, allowing\ + \ your app to access data. Obtain it from your e-conomic account settings." + name: "agreement_grant_token" + order: 1 + title: "Agreement Grant Token" + airbyte_secret: true source-appfollow: type: "object" required: @@ -16838,6 +18616,55 @@ components: description: "API Secret" airbyte_secret: true order: 0 + source-gitbook: + type: "object" + required: + - "access_token" + - "space_id" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Personal access token for authenticating with the GitBook\ + \ API. You can view and manage your access tokens in the Developer settings\ + \ of your GitBook user account." 
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + space_id: + type: "string" + order: 1 + title: "Space Id" + sourceType: + title: "gitbook" + const: "gitbook" + enum: + - "gitbook" + order: 0 + type: "string" + source-gitbook-update: + type: "object" + required: + - "access_token" + - "space_id" + properties: + access_token: + type: "string" + description: + "Personal access token for authenticating with the GitBook\ + \ API. You can view and manage your access tokens in the Developer settings\ + \ of your GitBook user account." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + space_id: + type: "string" + order: 1 + title: "Space Id" source-miro: type: "object" required: @@ -17160,6 +18987,111 @@ components: \ issues fetching the stream, or checking the connection please set this\ \ to `False` instead." default: true + source-finnworlds: + type: "object" + required: + - "key" + - "start_date" + - "sourceType" + properties: + list: + type: "string" + description: "Choose isin, ticker, reg_lei or cik" + order: 0 + title: "List" + default: "ticker" + list_countries_for_bonds: + type: "string" + order: 1 + title: "List Countries for Bonds" + default: "country" + key: + type: "string" + order: 2 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + bond_type: + type: "array" + description: "For example 10y, 5y, 2y..." + order: 3 + title: "Bond Type" + countries: + type: "array" + description: "brazil, united states, italia, japan" + order: 4 + title: "Countries" + tickers: + type: "array" + description: "AAPL, T, MU, GOOG" + order: 5 + title: "Tickers" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + commodities: + type: "array" + description: "Options Available: beef, cheese, oil, ..." + order: 7 + title: "Commodities" + sourceType: + title: "finnworlds" + const: "finnworlds" + enum: + - "finnworlds" + order: 0 + type: "string" + source-finnworlds-update: + type: "object" + required: + - "key" + - "start_date" + properties: + list: + type: "string" + description: "Choose isin, ticker, reg_lei or cik" + order: 0 + title: "List" + default: "ticker" + list_countries_for_bonds: + type: "string" + order: 1 + title: "List Countries for Bonds" + default: "country" + key: + type: "string" + order: 2 + title: "API Key" + airbyte_secret: true + bond_type: + type: "array" + description: "For example 10y, 5y, 2y..." + order: 3 + title: "Bond Type" + countries: + type: "array" + description: "brazil, united states, italia, japan" + order: 4 + title: "Countries" + tickers: + type: "array" + description: "AAPL, T, MU, GOOG" + order: 5 + title: "Tickers" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + commodities: + type: "array" + description: "Options Available: beef, cheese, oil, ..." 
+ order: 7 + title: "Commodities" source-google-calendar: type: "object" required: @@ -17456,12 +19388,178 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-zoho-billing: + type: "object" + required: + - "region" + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + name: "region" + order: 0 + title: "Region" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zoho-billing" + const: "zoho-billing" + enum: + - "zoho-billing" + order: 0 + type: "string" + source-zoho-billing-update: + type: "object" + required: + - "region" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + name: "region" + order: 0 + title: "Region" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + source-akeneo: + type: "object" + required: + - "host" + - "api_username" + - "password" + - "client_id" + - "sourceType" + properties: + host: + type: "string" + description: "https://cb8715249e.trial.akeneo.cloud" + order: 0 + title: "Host" + api_username: + type: "string" + order: 1 + title: "API Username" + password: + type: "string" + order: 2 + title: "Password" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + order: 3 + title: "Client ID" + secret: + type: "string" + order: 4 + title: "Secret" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "akeneo" + const: "akeneo" + enum: + - "akeneo" + order: 0 + type: "string" + source-akeneo-update: + type: "object" + required: + - "host" + - "api_username" + - "password" + - "client_id" + properties: + host: + type: "string" + description: "https://cb8715249e.trial.akeneo.cloud" + order: 0 + title: "Host" + api_username: + type: "string" + order: 1 + title: "API Username" + password: + type: "string" + order: 2 + title: "Password" + airbyte_secret: true + client_id: + type: "string" + order: 3 + title: "Client ID" + secret: + type: "string" + order: 4 + title: "Secret" + always_show: true + airbyte_secret: true source-amazon-ads: - title: "Amazon Ads Spec" + title: "Source Amazon Ads" type: "object" properties: auth_type: title: "Auth Type" + default: "oauth2.0" const: "oauth2.0" order: 0 type: "string" @@ -17473,9 +19571,9 @@ components: "The client ID of your Amazon Ads developer application. See\ \ the docs for more information." 
+ airbyte_secret: true order: 1 type: "string" - airbyte_secret: true x-speakeasy-param-sensitive: true client_secret: title: "Client Secret" @@ -17501,25 +19599,176 @@ components: description: "Region to pull data from (EU/NA/FE). See docs for more details." + default: "NA" enum: - "NA" - "EU" - "FE" - type: "string" - default: "NA" order: 4 + type: "string" start_date: title: "Start Date" description: "The Start date for collecting reports, should not be more\ \ than 60 days in the past. In YYYY-MM-DD format" + examples: + - "2022-10-10" + - "2022-10-22" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 5 + type: "string" format: "date" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + default: [] + order: 8 + type: "array" + items: + title: "StateFilterEnum" + description: "An enumeration." + enum: + - "enabled" + - "paused" + - "archived" + type: "string" + uniqueItems: true + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + default: 3 + examples: + - 3 + - 10 + order: 9 + type: "integer" + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + default: [] + order: 10 + type: "array" + items: + title: "ReportRecordTypeEnum" + description: "An enumeration." + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "string" + uniqueItems: true + sourceType: + title: "amazon-ads" + const: "amazon-ads" + enum: + - "amazon-ads" + order: 0 + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + source-amazon-ads-update: + title: "Source Amazon Ads" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2.0" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + airbyte_secret: true + order: 1 + type: "string" + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. 
See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." + default: "NA" + enum: + - "NA" + - "EU" + - "FE" + order: 4 + type: "string" + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" examples: - "2022-10-10" - "2022-10-22" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" order: 5 type: "string" + format: "date" profiles: title: "Profile IDs" description: @@ -17549,170 +19798,29 @@ components: "Reflects the state of the Display, Product, and Brand Campaign\ \ streams as enabled, paused, or archived. If you do not populate this\ \ field, it will be ignored completely." - items: - type: "string" - enum: - - "enabled" - - "paused" - - "archived" - type: "array" - uniqueItems: true + default: [] order: 8 - look_back_window: - title: "Look Back Window" - description: - "The amount of days to go back in time to get the updated data\ - \ from Amazon Ads" - examples: - - 3 - - 10 - type: "integer" - default: 3 - order: 9 - report_record_types: - title: "Report Record Types" - description: - "Optional configuration which accepts an array of string of\ - \ record types. Leave blank for default behaviour to pull all report types.\ - \ Use this config option only if you want to pull specific report type(s).\ - \ See docs for more details" - items: - type: "string" - enum: - - "adGroups" - - "asins" - - "asins_keywords" - - "asins_targets" - - "campaigns" - - "keywords" - - "productAds" - - "targets" - type: "array" - uniqueItems: true - order: 10 - sourceType: - title: "amazon-ads" - const: "amazon-ads" - enum: - - "amazon-ads" - order: 0 - type: "string" - required: - - "client_id" - - "client_secret" - - "refresh_token" - - "sourceType" - source-amazon-ads-update: - title: "Amazon Ads Spec" - type: "object" - properties: - auth_type: - title: "Auth Type" - const: "oauth2.0" - order: 0 - type: "string" - enum: - - "oauth2.0" - client_id: - title: "Client ID" - description: - "The client ID of your Amazon Ads developer application. See\ - \ the docs for more information." - order: 1 - type: "string" - airbyte_secret: true - client_secret: - title: "Client Secret" - description: - "The client secret of your Amazon Ads developer application.\ - \ See the docs for more information." - airbyte_secret: true - order: 2 - type: "string" - refresh_token: - title: "Refresh Token" - description: - "Amazon Ads refresh token. See the docs for more information on how to obtain this token." - airbyte_secret: true - order: 3 - type: "string" - region: - title: "Region" - description: - "Region to pull data from (EU/NA/FE). See docs for more details." - enum: - - "NA" - - "EU" - - "FE" - type: "string" - default: "NA" - order: 4 - start_date: - title: "Start Date" - description: - "The Start date for collecting reports, should not be more\ - \ than 60 days in the past. In YYYY-MM-DD format" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - format: "date" - examples: - - "2022-10-10" - - "2022-10-22" - order: 5 - type: "string" - profiles: - title: "Profile IDs" - description: - "Profile IDs you want to fetch data for. The Amazon Ads source\ - \ connector supports only profiles with seller and vendor type, profiles\ - \ with agency type will be ignored. See docs for more details. 
Note: If Marketplace IDs are also selected,\ - \ profiles will be selected if they match the Profile ID OR the Marketplace\ - \ ID." - order: 6 type: "array" items: - type: "integer" - marketplace_ids: - title: "Marketplace IDs" - description: - "Marketplace IDs you want to fetch data for. Note: If Profile\ - \ IDs are also selected, profiles will be selected if they match the Profile\ - \ ID OR the Marketplace ID." - order: 7 - type: "array" - items: - type: "string" - state_filter: - title: "State Filter" - description: - "Reflects the state of the Display, Product, and Brand Campaign\ - \ streams as enabled, paused, or archived. If you do not populate this\ - \ field, it will be ignored completely." - items: - type: "string" + title: "StateFilterEnum" + description: "An enumeration." enum: - "enabled" - "paused" - "archived" - type: "array" + type: "string" uniqueItems: true - order: 8 look_back_window: title: "Look Back Window" description: "The amount of days to go back in time to get the updated data\ \ from Amazon Ads" + default: 3 examples: - 3 - 10 - type: "integer" - default: 3 order: 9 + type: "integer" report_record_types: title: "Report Record Types" description: @@ -17721,8 +19829,12 @@ components: \ Use this config option only if you want to pull specific report type(s).\ \ See docs for more details" + default: [] + order: 10 + type: "array" items: - type: "string" + title: "ReportRecordTypeEnum" + description: "An enumeration." enum: - "adGroups" - "asins" @@ -17732,9 +19844,8 @@ components: - "keywords" - "productAds" - "targets" - type: "array" + type: "string" uniqueItems: true - order: 10 required: - "client_id" - "client_secret" @@ -18257,6 +20368,133 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 3 + source-newsdata-io: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + search_query: + type: "string" + description: + "Search news articles for specific keywords or phrases present\ + \ in the news title, content, URL, meta keywords and meta description." + title: "Search Query" + order: 1 + countries: + type: "array" + description: + "Search the news articles from a specific country. You can\ + \ add up to 5 countries in a single query. Example: au, jp, br" + title: "Countries" + order: 2 + categories: + type: "array" + description: + "Search the news articles for a specific category. You can\ + \ add up to 5 categories in a single query." + title: "Categories" + order: 3 + languages: + type: "array" + description: + "Search the news articles for a specific language. You can\ + \ add up to 5 languages in a single query. " + title: "Languages" + order: 4 + domains: + type: "array" + description: + "Search the news articles for specific domains or news sources.\ + \ You can add up to 5 domains in a single query. " + title: "Domains" + order: 5 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + end_date: + type: "string" + description: "Choose an end date. 
Now UTC is default value" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 7 + sourceType: + title: "newsdata-io" + const: "newsdata-io" + enum: + - "newsdata-io" + order: 0 + type: "string" + source-newsdata-io-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + search_query: + type: "string" + description: + "Search news articles for specific keywords or phrases present\ + \ in the news title, content, URL, meta keywords and meta description." + title: "Search Query" + order: 1 + countries: + type: "array" + description: + "Search the news articles from a specific country. You can\ + \ add up to 5 countries in a single query. Example: au, jp, br" + title: "Countries" + order: 2 + categories: + type: "array" + description: + "Search the news articles for a specific category. You can\ + \ add up to 5 categories in a single query." + title: "Categories" + order: 3 + languages: + type: "array" + description: + "Search the news articles for a specific language. You can\ + \ add up to 5 languages in a single query. " + title: "Languages" + order: 4 + domains: + type: "array" + description: + "Search the news articles for specific domains or news sources.\ + \ You can add up to 5 domains in a single query. " + title: "Domains" + order: 5 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + end_date: + type: "string" + description: "Choose an end date. Now UTC is default value" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 7 source-clazar: type: "object" required: @@ -20426,6 +22664,45 @@ components: >here." airbyte_secret: true order: 0 + source-repairshopr: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + title: "subdomain" + order: 1 + sourceType: + title: "repairshopr" + const: "repairshopr" + enum: + - "repairshopr" + order: 0 + type: "string" + source-repairshopr-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + title: "subdomain" + order: 1 source-hubplanner: title: "Hubplanner Spec" type: "object" @@ -21937,6 +24214,53 @@ components: type: "string" airbyte_secret: true order: 4 + source-lightspeed-retail: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key or access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "The subdomain for the retailer, e.g., 'example' in 'example.retail.lightspeed.app'." 
+ name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "lightspeed-retail" + const: "lightspeed-retail" + enum: + - "lightspeed-retail" + order: 0 + type: "string" + source-lightspeed-retail-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: "API key or access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "The subdomain for the retailer, e.g., 'example' in 'example.retail.lightspeed.app'." + name: "subdomain" + order: 1 + title: "Subdomain" source-postmarkapp: type: "object" required: @@ -21983,6 +24307,107 @@ components: order: 1 title: "X-Postmark-Account-Token" airbyte_secret: true + source-finnhub: + type: "object" + required: + - "api_key" + - "symbols" + - "market_news_category" + - "exchange" + - "start_date_2" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + symbols: + type: "array" + name: "company_symbol" + order: 1 + title: "Companies" + market_news_category: + type: "string" + description: + "This parameter can be 1 of the following values general, forex,\ + \ crypto, merger." + title: "Market News Category" + default: "general" + enum: + - "general" + - "forex" + - "crypto" + - "merger" + order: 2 + exchange: + type: "string" + description: "More info: https://finnhub.io/docs/api/stock-symbols" + title: "Exchange" + default: "US" + order: 3 + start_date_2: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 + sourceType: + title: "finnhub" + const: "finnhub" + enum: + - "finnhub" + order: 0 + type: "string" + source-finnhub-update: + type: "object" + required: + - "api_key" + - "symbols" + - "market_news_category" + - "exchange" + - "start_date_2" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + symbols: + type: "array" + name: "company_symbol" + order: 1 + title: "Companies" + market_news_category: + type: "string" + description: + "This parameter can be 1 of the following values general, forex,\ + \ crypto, merger." + title: "Market News Category" + default: "general" + enum: + - "general" + - "forex" + - "crypto" + - "merger" + order: 2 + exchange: + type: "string" + description: "More info: https://finnhub.io/docs/api/stock-symbols" + title: "Exchange" + default: "US" + order: 3 + start_date_2: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 source-float: type: "object" required: @@ -22093,6 +24518,66 @@ components: minimum: 1 default: 1000 order: 0 + source-onfleet: + type: "object" + required: + - "api_key" + - "password" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use for authenticating requests. You can create\ + \ and manage your API keys in the API section of the Onfleet dashboard." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + password: + type: "string" + description: + "Placeholder for basic HTTP auth password - should be set to\ + \ empty string" + name: "password" + order: 1 + title: "Placeholder Password" + default: "x" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "onfleet" + const: "onfleet" + enum: + - "onfleet" + order: 0 + type: "string" + source-onfleet-update: + type: "object" + required: + - "api_key" + - "password" + properties: + api_key: + type: "string" + description: + "API key to use for authenticating requests. You can create\ + \ and manage your API keys in the API section of the Onfleet dashboard." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + password: + type: "string" + description: + "Placeholder for basic HTTP auth password - should be set to\ + \ empty string" + name: "password" + order: 1 + title: "Placeholder Password" + default: "x" + airbyte_secret: true source-gorgias: type: "object" required: @@ -22633,7 +25118,118 @@ components: type: "string" airbyte_secret: true order: 1 - x-speakeasy-param-sensitive: true + x-speakeasy-param-sensitive: true + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." 
+ type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + sourceType: + title: "sftp" + const: "sftp" + enum: + - "sftp" + order: 0 + type: "string" + source-sftp-update: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: "Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 - title: "SSH Key Authentication" required: - "auth_method" @@ -22655,7 +25251,6 @@ components: airbyte_secret: true multiline: true order: 1 - x-speakeasy-param-sensitive: true file_types: title: "File types" description: @@ -22685,116 +25280,75 @@ components: examples: - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" order: 6 + source-agilecrm: + type: "object" + required: + - "email" + - "domain" + - "api_key" + - "sourceType" + properties: + email: + type: "string" + description: + "Your Agile CRM account email address. This is used as the\ + \ username for authentication." + name: "email" + order: 0 + title: "Email Address" + domain: + type: "string" + description: "The specific subdomain for your Agile CRM account" + name: "domain" + order: 1 + title: "Domain" + api_key: + type: "string" + description: + "API key to use. Find it at Admin Settings -> API & Analytics\ + \ -> API Key in your Agile CRM account." + name: "api_key" + order: 2 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true sourceType: - title: "sftp" - const: "sftp" + title: "agilecrm" + const: "agilecrm" enum: - - "sftp" + - "agilecrm" order: 0 type: "string" - source-sftp-update: - title: "SFTP Source Spec" + source-agilecrm-update: type: "object" required: - - "user" - - "host" - - "port" + - "email" + - "domain" + - "api_key" properties: - user: - title: "User Name" - description: "The server user" + email: type: "string" + description: + "Your Agile CRM account email address. This is used as the\ + \ username for authentication." 
+ name: "email" order: 0 - host: - title: "Host Address" - description: "The server host address" + title: "Email Address" + domain: type: "string" - examples: - - "www.host.com" - - "192.0.2.1" + description: "The specific subdomain for your Agile CRM account" + name: "domain" order: 1 - port: - title: "Port" - description: "The server port" - type: "integer" - default: 22 - examples: - - "22" - order: 2 - credentials: - type: "object" - title: "Authentication" - description: "The server authentication method" - order: 3 - oneOf: - - title: "Password Authentication" - required: - - "auth_method" - - "auth_user_password" - properties: - auth_method: - description: "Connect through password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - enum: - - "SSH_PASSWORD_AUTH" - auth_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 1 - - title: "SSH Key Authentication" - required: - - "auth_method" - - "auth_ssh_key" - properties: - auth_method: - description: "Connect through ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - enum: - - "SSH_KEY_AUTH" - auth_ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 1 - file_types: - title: "File types" - description: - "Coma separated file types. Currently only 'csv' and 'json'\ - \ types are supported." - type: "string" - default: "csv,json" - order: 4 - examples: - - "csv,json" - - "csv" - folder_path: - title: "Folder Path" - description: "The directory to search files for sync" + title: "Domain" + api_key: type: "string" - default: "" - examples: - - "/logs/2022" - order: 5 - file_pattern: - title: "File Pattern" description: - "The regular expression to specify files for sync in a chosen\ - \ Folder Path" - type: "string" - default: "" - examples: - - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" - order: 6 + "API key to use. Find it at Admin Settings -> API & Analytics\ + \ -> API Key in your Agile CRM account." + name: "api_key" + order: 2 + title: "API Key" + airbyte_secret: true source-google-drive: title: "Google Drive Source Spec" description: @@ -23814,6 +26368,45 @@ components: >here." 
airbyte_secret: true order: 2 + source-pabbly-subscriptions-billing: + type: "object" + required: + - "username" + - "sourceType" + properties: + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + username: + type: "string" + order: 0 + title: "Username" + sourceType: + title: "pabbly-subscriptions-billing" + const: "pabbly-subscriptions-billing" + enum: + - "pabbly-subscriptions-billing" + order: 0 + type: "string" + source-pabbly-subscriptions-billing-update: + type: "object" + required: + - "username" + properties: + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + username: + type: "string" + order: 0 + title: "Username" source-chameleon: type: "object" required: @@ -25170,8 +27763,61 @@ components: title: "Refresh token" airbyte_secret: true order: 4 + source-taboola: + type: "object" + required: + - "client_id" + - "client_secret" + - "account_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + description: "The ID associated with your taboola account" + order: 2 + title: "Account ID" + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "taboola" + const: "taboola" + enum: + - "taboola" + order: 0 + type: "string" + source-taboola-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "account_id" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + account_id: + type: "string" + description: "The ID associated with your taboola account" + order: 2 + title: "Account ID" + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true source-qualaroo: - title: "Qualaroo Spec" type: "object" required: - "token" @@ -25181,39 +27827,43 @@ components: properties: token: type: "string" - title: "API token" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API token" airbyte_secret: true + order: 0 x-speakeasy-param-sensitive: true key: type: "string" - title: "API key" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API key" airbyte_secret: true + order: 1 x-speakeasy-param-sensitive: true start_date: type: "string" - title: "Start Date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ \ data before this date will not be replicated." + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" examples: - "2021-03-01T00:00:00.000Z" + order: 2 survey_ids: type: "array" - items: - type: "string" - pattern: "^[0-9]{1,8}$" - title: "Qualaroo survey IDs" description: "IDs of the surveys from which you'd like to replicate data.\ \ If left empty, data from all surveys to which you have access will be\ \ replicated." 
+ items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + order: 3 sourceType: title: "qualaroo" const: "qualaroo" @@ -25222,7 +27872,6 @@ components: order: 0 type: "string" source-qualaroo-update: - title: "Qualaroo Spec" type: "object" required: - "token" @@ -25231,37 +27880,41 @@ components: properties: token: type: "string" - title: "API token" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API token" airbyte_secret: true + order: 0 key: type: "string" - title: "API key" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API key" airbyte_secret: true + order: 1 start_date: type: "string" - title: "Start Date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ \ data before this date will not be replicated." + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" examples: - "2021-03-01T00:00:00.000Z" + order: 2 survey_ids: type: "array" - items: - type: "string" - pattern: "^[0-9]{1,8}$" - title: "Qualaroo survey IDs" description: "IDs of the surveys from which you'd like to replicate data.\ \ If left empty, data from all surveys to which you have access will be\ \ replicated." + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + order: 3 source-front: type: "object" required: @@ -25664,6 +28317,283 @@ components: additionalProperties: true order: 3 title: "Authentication mechanism" + source-sendowl: + type: "object" + required: + - "username" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + description: "Enter you API Key" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API secret" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "sendowl" + const: "sendowl" + enum: + - "sendowl" + order: 0 + type: "string" + source-sendowl-update: + type: "object" + required: + - "username" + - "start_date" + properties: + username: + type: "string" + description: "Enter you API Key" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API secret" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-getgist: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Integration Settings on your\ + \ Gist dashboard at https://app.getgist.com/projects/_/settings/api-key." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "getgist" + const: "getgist" + enum: + - "getgist" + order: 0 + type: "string" + source-getgist-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Integration Settings on your\ + \ Gist dashboard at https://app.getgist.com/projects/_/settings/api-key." 
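# A sketch of a config for the source-sendowl schema above; per the field
# descriptions, username carries the API key and password carries the API secret.
username: "<sendowl-api-key>"
password: "<sendowl-api-secret>"
start_date: "2024-01-01T00:00:00Z"
sourceType: "sendowl"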
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-mailtrap: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://mailtrap.io/account" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "mailtrap" + const: "mailtrap" + enum: + - "mailtrap" + order: 0 + type: "string" + source-mailtrap-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://mailtrap.io/account" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-cloudbeds: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "cloudbeds" + const: "cloudbeds" + enum: + - "cloudbeds" + order: 0 + type: "string" + source-cloudbeds-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + source-freshbooks: + type: "object" + required: + - "client_id" + - "client_secret" + - "redirect_uri" + - "account_id" + - "client_refresh_token" + - "business_uuid" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + redirect_uri: + type: "string" + order: 2 + title: "Redirect Uri" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + order: 3 + title: "Account Id" + client_refresh_token: + type: "string" + order: 4 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + oauth_access_token: + type: "string" + description: + "The current access token. This field might be overridden by\ + \ the connector based on the token refresh endpoint response." + order: 5 + title: "Access token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + oauth_token_expiry_date: + type: "string" + description: + "The date the current access token expires in. This field might\ + \ be overridden by the connector based on the token refresh endpoint response." + order: 6 + title: "Token expiry date" + format: "date-time" + business_uuid: + type: "string" + order: 7 + title: "Business uuid" + sourceType: + title: "freshbooks" + const: "freshbooks" + enum: + - "freshbooks" + order: 0 + type: "string" + source-freshbooks-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "redirect_uri" + - "account_id" + - "client_refresh_token" + - "business_uuid" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + redirect_uri: + type: "string" + order: 2 + title: "Redirect Uri" + airbyte_secret: true + account_id: + type: "string" + order: 3 + title: "Account Id" + client_refresh_token: + type: "string" + order: 4 + title: "Refresh token" + airbyte_secret: true + oauth_access_token: + type: "string" + description: + "The current access token. This field might be overridden by\ + \ the connector based on the token refresh endpoint response." 
+ order: 5 + title: "Access token" + airbyte_secret: true + oauth_token_expiry_date: + type: "string" + description: + "The date the current access token expires in. This field might\ + \ be overridden by the connector based on the token refresh endpoint response." + order: 6 + title: "Token expiry date" + format: "date-time" + business_uuid: + type: "string" + order: 7 + title: "Business uuid" source-just-sift: type: "object" required: @@ -26241,31 +29171,31 @@ components: properties: api_key: type: "string" - title: "API Key" - airbyte_secret: true description: "Recurly API Key. See the docs for more information on how to generate this key." order: 0 + title: "API Key" + airbyte_secret: true x-speakeasy-param-sensitive: true begin_time: type: "string" description: "ISO8601 timestamp from which the replication from Recurly\ \ API will start from." + order: 1 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 1 end_time: type: "string" description: "ISO8601 timestamp to which the replication from Recurly API\ \ will stop. Records after that date won't be imported." + order: 2 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 2 sourceType: title: "recurly" const: "recurly" @@ -26280,30 +29210,30 @@ components: properties: api_key: type: "string" - title: "API Key" - airbyte_secret: true description: "Recurly API Key. See the docs for more information on how to generate this key." order: 0 + title: "API Key" + airbyte_secret: true begin_time: type: "string" description: "ISO8601 timestamp from which the replication from Recurly\ \ API will start from." + order: 1 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 1 end_time: type: "string" description: "ISO8601 timestamp to which the replication from Recurly API\ \ will stop. Records after that date won't be imported." + order: 2 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 2 source-pennylane: type: "object" required: @@ -26471,6 +29401,7 @@ components: title: "Zendesk Chat Spec" type: "object" required: + - "subdomain" - "start_date" - "sourceType" properties: @@ -26488,8 +29419,12 @@ components: type: "string" title: "Subdomain" description: - "Required if you access Zendesk Chat from a Zendesk Support\ - \ subdomain." + "The unique subdomain of your Zendesk account (without https://).\ + \ See the Zendesk docs to find your subdomain" + pattern: "^(?!https://)" + examples: + - "myzendeskchat" default: "" credentials: title: "Authorization Method" @@ -26561,6 +29496,7 @@ components: title: "Zendesk Chat Spec" type: "object" required: + - "subdomain" - "start_date" properties: start_date: @@ -26577,8 +29513,12 @@ components: type: "string" title: "Subdomain" description: - "Required if you access Zendesk Chat from a Zendesk Support\ - \ subdomain." 
+ "The unique subdomain of your Zendesk account (without https://).\ + \ See the Zendesk docs to find your subdomain" + pattern: "^(?!https://)" + examples: + - "myzendeskchat" default: "" credentials: title: "Authorization Method" @@ -27076,6 +30016,147 @@ components: order: 2 title: "API Endpoint Prefix" default: "api" + source-nocrm: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the admin section of your\ + \ noCRM.io account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: + "The subdomain specific to your noCRM.io account, e.g., 'yourcompany'\ + \ in 'yourcompany.nocrm.io'." + name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "nocrm" + const: "nocrm" + enum: + - "nocrm" + order: 0 + type: "string" + source-nocrm-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the admin section of your\ + \ noCRM.io account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: + "The subdomain specific to your noCRM.io account, e.g., 'yourcompany'\ + \ in 'yourcompany.nocrm.io'." + name: "subdomain" + order: 1 + title: "Subdomain" + source-openaq: + type: "object" + required: + - "api_key" + - "country_ids" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + country_ids: + type: "array" + description: + "The list of IDs of countries (comma separated) you need the\ + \ data for, check more: https://docs.openaq.org/resources/countries" + order: 1 + title: "Countries" + sourceType: + title: "openaq" + const: "openaq" + enum: + - "openaq" + order: 0 + type: "string" + source-openaq-update: + type: "object" + required: + - "api_key" + - "country_ids" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + country_ids: + type: "array" + description: + "The list of IDs of countries (comma separated) you need the\ + \ data for, check more: https://docs.openaq.org/resources/countries" + order: 1 + title: "Countries" + source-deputy: + type: "object" + required: + - "base_url" + - "api_key" + - "sourceType" + properties: + base_url: + type: "string" + description: "The base url for your deputy account to make API requests" + order: 0 + title: "Base URL" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "deputy" + const: "deputy" + enum: + - "deputy" + order: 0 + type: "string" + source-deputy-update: + type: "object" + required: + - "base_url" + - "api_key" + properties: + base_url: + type: "string" + description: "The base url for your deputy account to make API requests" + order: 0 + title: "Base URL" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true source-workflowmax: type: "object" required: @@ -27201,6 +30282,81 @@ components: always_show: true airbyte_secret: true order: 3 + source-stockdata: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + symbols: + type: "array" + order: 
1 + title: "Symbols" + industries: + type: "array" + description: + "Specify the industries of entities which have been identified\ + \ within the article." + order: 2 + title: "Industries" + filter_entities: + type: "boolean" + order: 3 + title: "Entities" + default: false + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "stockdata" + const: "stockdata" + enum: + - "stockdata" + order: 0 + type: "string" + source-stockdata-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + symbols: + type: "array" + order: 1 + title: "Symbols" + industries: + type: "array" + description: + "Specify the industries of entities which have been identified\ + \ within the article." + order: 2 + title: "Industries" + filter_entities: + type: "boolean" + order: 3 + title: "Entities" + default: false + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-dockerhub: type: "object" required: @@ -30030,6 +33186,43 @@ components: order: 0 title: "Bearer Token" airbyte_secret: true + source-ubidots: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use for authentication. Obtain it from your Ubidots\ + \ account." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "ubidots" + const: "ubidots" + enum: + - "ubidots" + order: 0 + type: "string" + source-ubidots-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use for authentication. Obtain it from your Ubidots\ + \ account." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-height: type: "object" required: @@ -30167,6 +33360,43 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-ruddr: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it in the API Keys section of your\ + \ Ruddr workspace settings." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "ruddr" + const: "ruddr" + enum: + - "ruddr" + order: 0 + type: "string" + source-ruddr-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it in the API Keys section of your\ + \ Ruddr workspace settings." 
+ name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-polygon-stock-api: type: "object" required: @@ -30834,13 +34064,14 @@ components: - id: "advanced" title: "Advanced" source-retently: - title: "Retently Api Spec" type: "object" + required: + - "sourceType" properties: credentials: - title: "Authentication Mechanism" - description: "Choose how to authenticate to Retently" type: "object" + description: "Choose how to authenticate to Retently" + title: "Authentication Mechanism" oneOf: - type: "object" title: "Authenticate via Retently (OAuth)" @@ -30857,21 +34088,21 @@ components: enum: - "Client" client_id: - title: "Client ID" type: "string" description: "The Client ID of your Retently developer application." + title: "Client ID" client_secret: - title: "Client Secret" type: "string" description: "The Client Secret of your Retently developer application." + title: "Client Secret" airbyte_secret: true x-speakeasy-param-sensitive: true refresh_token: - title: "Refresh Token" type: "string" description: "Retently Refresh Token which can be used to fetch new\ \ Bearer Tokens when the current one expires." + title: "Refresh Token" airbyte_secret: true x-speakeasy-param-sensitive: true - type: "object" @@ -30887,13 +34118,14 @@ components: enum: - "Token" api_key: - title: "API Token" + type: "string" description: "Retently API Token. See the docs for more information on how to obtain this key." - type: "string" + title: "API Token" airbyte_secret: true x-speakeasy-param-sensitive: true + order: 0 sourceType: title: "retently" const: "retently" @@ -30902,13 +34134,13 @@ components: order: 0 type: "string" source-retently-update: - title: "Retently Api Spec" type: "object" + required: [] properties: credentials: - title: "Authentication Mechanism" - description: "Choose how to authenticate to Retently" type: "object" + description: "Choose how to authenticate to Retently" + title: "Authentication Mechanism" oneOf: - type: "object" title: "Authenticate via Retently (OAuth)" @@ -30925,20 +34157,20 @@ components: enum: - "Client" client_id: - title: "Client ID" type: "string" description: "The Client ID of your Retently developer application." + title: "Client ID" client_secret: - title: "Client Secret" type: "string" description: "The Client Secret of your Retently developer application." + title: "Client Secret" airbyte_secret: true refresh_token: - title: "Refresh Token" type: "string" description: "Retently Refresh Token which can be used to fetch new\ \ Bearer Tokens when the current one expires." + title: "Refresh Token" airbyte_secret: true - type: "object" title: "Authenticate with API Token" @@ -30953,12 +34185,13 @@ components: enum: - "Token" api_key: - title: "API Token" + type: "string" description: "Retently API Token. See the docs for more information on how to obtain this key." 
- type: "string" + title: "API Token" airbyte_secret: true + order: 0 source-jotform: type: "object" required: @@ -31173,6 +34406,45 @@ components: type: "string" required: - "access_token" + source-nutshell: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "API Token" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "nutshell" + const: "nutshell" + enum: + - "nutshell" + order: 0 + type: "string" + source-nutshell-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "API Token" + always_show: true + airbyte_secret: true source-dbt: type: "object" required: @@ -31698,6 +34970,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 6 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." + required: + - "delivery_type" bucket: title: "Bucket" description: "Name of the S3 bucket where the file(s) exist." @@ -32172,6 +35487,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 6 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." 
+ required: + - "delivery_type" bucket: title: "Bucket" description: "Name of the S3 bucket where the file(s) exist." @@ -32347,32 +35705,88 @@ components: examples: - "2020-10-15T00:00:00Z" order: 3 + source-box: + type: "object" + required: + - "client_id" + - "client_secret" + - "user" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + user: + type: "number" + order: 2 + title: "User" + sourceType: + title: "box" + const: "box" + enum: + - "box" + order: 0 + type: "string" + source-box-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "user" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + user: + type: "number" + order: 2 + title: "User" source-zendesk-sunshine: type: "object" required: - - "start_date" - "subdomain" + - "start_date" - "sourceType" properties: subdomain: type: "string" + description: "The subdomain for your Zendesk Account." order: 0 title: "Subdomain" - description: "The subdomain for your Zendesk Account." start_date: type: "string" - title: "Start date" - format: "date-time" description: "The date from which you'd like to replicate data for Zendesk\ \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + title: "Start date" + format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-01-01T00:00:00Z" order: 1 credentials: - title: "Authorization Method" type: "object" + title: "Authorization Method" oneOf: - type: "object" title: "OAuth2.0" @@ -32391,20 +35805,20 @@ components: order: 0 client_id: type: "string" - title: "Client ID" description: "The Client ID of your OAuth application." + title: "Client ID" airbyte_secret: true x-speakeasy-param-sensitive: true client_secret: type: "string" - title: "Client Secret" description: "The Client Secret of your OAuth application." + title: "Client Secret" airbyte_secret: true x-speakeasy-param-sensitive: true access_token: type: "string" - title: "Access Token" description: "Long-term access Token for making authenticated requests." + title: "Access Token" airbyte_secret: true x-speakeasy-param-sensitive: true - type: "object" @@ -32423,16 +35837,17 @@ components: order: 1 api_token: type: "string" - title: "API Token" description: "API Token. See the docs for information on how to generate this key." + title: "API Token" airbyte_secret: true x-speakeasy-param-sensitive: true email: type: "string" - title: "Email" description: "The user email for your Zendesk account" + title: "Email" + order: 2 sourceType: title: "zendesk-sunshine" const: "zendesk-sunshine" @@ -32443,28 +35858,28 @@ components: source-zendesk-sunshine-update: type: "object" required: - - "start_date" - "subdomain" + - "start_date" properties: subdomain: type: "string" + description: "The subdomain for your Zendesk Account." order: 0 title: "Subdomain" - description: "The subdomain for your Zendesk Account." start_date: type: "string" - title: "Start date" - format: "date-time" description: "The date from which you'd like to replicate data for Zendesk\ \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." 
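# The delivery_method block added above (to what appears to be the S3 file-based
# source spec, given the bucket field) selects between parsing structured records
# and copying raw files. A sketch, with a placeholder bucket name:
bucket: "my-example-bucket"
delivery_method:
  delivery_type: "use_file_transfer"   # or "use_records_transfer" (the default)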
+ title: "Start date" + format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-01-01T00:00:00Z" order: 1 credentials: - title: "Authorization Method" type: "object" + title: "Authorization Method" oneOf: - type: "object" title: "OAuth2.0" @@ -32483,18 +35898,18 @@ components: order: 0 client_id: type: "string" - title: "Client ID" description: "The Client ID of your OAuth application." + title: "Client ID" airbyte_secret: true client_secret: type: "string" - title: "Client Secret" description: "The Client Secret of your OAuth application." + title: "Client Secret" airbyte_secret: true access_token: type: "string" - title: "Access Token" description: "Long-term access Token for making authenticated requests." + title: "Access Token" airbyte_secret: true - type: "object" title: "API Token" @@ -32512,15 +35927,16 @@ components: order: 1 api_token: type: "string" - title: "API Token" description: "API Token. See the docs for information on how to generate this key." + title: "API Token" airbyte_secret: true email: type: "string" - title: "Email" description: "The user email for your Zendesk account" + title: "Email" + order: 2 source-mention: type: "object" required: @@ -33364,6 +36780,55 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-track-pms: + type: "object" + required: + - "customer_domain" + - "api_key" + - "sourceType" + properties: + customer_domain: + type: "string" + order: 0 + title: "Customer Domain" + api_key: + type: "string" + order: 1 + title: "API Key" + api_secret: + type: "string" + order: 2 + title: "API Secret" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "track-pms" + const: "track-pms" + enum: + - "track-pms" + order: 0 + type: "string" + source-track-pms-update: + type: "object" + required: + - "customer_domain" + - "api_key" + properties: + customer_domain: + type: "string" + order: 0 + title: "Customer Domain" + api_key: + type: "string" + order: 1 + title: "API Key" + api_secret: + type: "string" + order: 2 + title: "API Secret" + always_show: true + airbyte_secret: true source-whisky-hunter: type: "object" required: @@ -33630,6 +37095,45 @@ components: "The date from which you'd like to replicate data for Salesloft\ \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ \ date will be replicated." 
+ source-youtube-data: + type: "object" + required: + - "api_key" + - "channel_ids" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + channel_ids: + type: "array" + order: 1 + title: "Channel IDs" + sourceType: + title: "youtube-data" + const: "youtube-data" + enum: + - "youtube-data" + order: 0 + type: "string" + source-youtube-data-update: + type: "object" + required: + - "api_key" + - "channel_ids" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + channel_ids: + type: "array" + order: 1 + title: "Channel IDs" source-yandex-metrica: title: "Yandex Metrica Spec" type: "object" @@ -34408,6 +37912,94 @@ components: - "2023-08-05T00:43:59.244Z" default: "2023-08-05T00:43:59.244Z" airbyte_secret: false + source-cal-com: + type: "object" + required: + - "orgId" + - "api_key" + - "sourceType" + properties: + orgId: + type: "string" + name: "Organization ID" + order: 0 + title: "orgId" + api_key: + type: "string" + description: "API key to use. Find it at https://cal.com/account" + name: "api_key" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "cal-com" + const: "cal-com" + enum: + - "cal-com" + order: 0 + type: "string" + source-cal-com-update: + type: "object" + required: + - "orgId" + - "api_key" + properties: + orgId: + type: "string" + name: "Organization ID" + order: 0 + title: "orgId" + api_key: + type: "string" + description: "API key to use. Find it at https://cal.com/account" + name: "api_key" + order: 1 + title: "API Key" + airbyte_secret: true + source-oveit: + type: "object" + required: + - "email" + - "password" + - "sourceType" + properties: + email: + type: "string" + description: "Oveit's login Email" + order: 0 + title: "Email" + password: + type: "string" + description: "Oveit's login Password" + order: 1 + title: "Password" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "oveit" + const: "oveit" + enum: + - "oveit" + order: 0 + type: "string" + source-oveit-update: + type: "object" + required: + - "email" + - "password" + properties: + email: + type: "string" + description: "Oveit's login Email" + order: 0 + title: "Email" + password: + type: "string" + description: "Oveit's login Password" + order: 1 + title: "Password" + airbyte_secret: true source-clockify: type: "object" required: @@ -36258,6 +39850,57 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-vercel: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token to authenticate with the Vercel API. Create and\ + \ manage tokens in your Vercel account settings." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "vercel" + const: "vercel" + enum: + - "vercel" + order: 0 + type: "string" + source-vercel-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Access token to authenticate with the Vercel API. Create and\ + \ manage tokens in your Vercel account settings." 
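# A sketch of a config for the source-vercel schema above; the token is a
# placeholder created in the Vercel account settings, as the description notes.
access_token: "<vercel-access-token>"
start_date: "2024-01-01T00:00:00Z"
sourceType: "vercel"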
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-orbit: type: "object" required: @@ -36459,6 +40102,140 @@ components: title: "Domain name" description: "Your Confluence domain name" order: 2 + source-zoho-expense: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + data_center: + type: "string" + description: + "The domain suffix for the Zoho Expense API based on your data\ + \ center location (e.g., 'com', 'eu', 'in', etc.)" + enum: + - "com" + - "in" + - "jp" + - "ca" + - "com.cn" + - "sa" + - "com.au" + - "eu" + name: "domain" + order: 0 + title: "Data Center" + default: "com" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zoho-expense" + const: "zoho-expense" + enum: + - "zoho-expense" + order: 0 + type: "string" + source-zoho-expense-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + data_center: + type: "string" + description: + "The domain suffix for the Zoho Expense API based on your data\ + \ center location (e.g., 'com', 'eu', 'in', etc.)" + enum: + - "com" + - "in" + - "jp" + - "ca" + - "com.cn" + - "sa" + - "com.au" + - "eu" + name: "domain" + order: 0 + title: "Data Center" + default: "com" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + source-formbricks: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate and find it in your Postman\ + \ account settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "formbricks" + const: "formbricks" + enum: + - "formbricks" + order: 0 + type: "string" + source-formbricks-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate and find it in your Postman\ + \ account settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-coin-api: title: "Coin API Spec" type: "object" @@ -36633,6 +40410,105 @@ components: \ to the name of the project . 
Example: 44056" order: 1 title: "Project Id" + source-zoho-bigin: + type: "object" + required: + - "client_id" + - "data_center" + - "client_secret" + - "client_refresh_token" + - "module_name" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + description: "The data center where the Bigin account's resources are hosted" + enum: + - "com" + - "com.au" + - "eu" + - "in" + - "com.cn" + - "jp" + name: "data_center" + order: 1 + title: "Data Center" + default: "com" + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 3 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + module_name: + type: "string" + order: 4 + title: "Module Name" + sourceType: + title: "zoho-bigin" + const: "zoho-bigin" + enum: + - "zoho-bigin" + order: 0 + type: "string" + source-zoho-bigin-update: + type: "object" + required: + - "client_id" + - "data_center" + - "client_secret" + - "client_refresh_token" + - "module_name" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + data_center: + type: "string" + description: "The data center where the Bigin account's resources are hosted" + enum: + - "com" + - "com.au" + - "eu" + - "in" + - "com.cn" + - "jp" + name: "data_center" + order: 1 + title: "Data Center" + default: "com" + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 3 + title: "Refresh token" + airbyte_secret: true + module_name: + type: "string" + order: 4 + title: "Module Name" source-slack: title: "Slack Spec" type: "object" @@ -36878,6 +40754,61 @@ components: >docs for instructions on how to generate it." airbyte_secret: true order: 1 + source-tremendous: + type: "object" + required: + - "api_key" + - "environment" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate an API key through the Tremendous\ + \ dashboard under Team Settings > Developers. Save the key once you’ve\ + \ generated it." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + environment: + type: "string" + enum: + - "api" + - "testflight" + order: 1 + title: "Environment" + sourceType: + title: "tremendous" + const: "tremendous" + enum: + - "tremendous" + order: 0 + type: "string" + source-tremendous-update: + type: "object" + required: + - "api_key" + - "environment" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate an API key through the Tremendous\ + \ dashboard under Team Settings > Developers. Save the key once you’ve\ + \ generated it." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + environment: + type: "string" + enum: + - "api" + - "testflight" + order: 1 + title: "Environment" source-gainsight-px: type: "object" required: @@ -36913,6 +40844,35 @@ components: "The Aptrinsic API Key which is recieved from the dashboard\ \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" order: 0 + source-humanitix: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "humanitix" + const: "humanitix" + enum: + - "humanitix" + order: 0 + type: "string" + source-humanitix-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 source-plaid: type: "object" required: @@ -38539,6 +42499,99 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-zoho-invoice: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "region" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: "To be provided if a user belongs to multiple organizations" + order: 3 + title: "Organization ID" + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + order: 4 + title: "Region" + sourceType: + title: "zoho-invoice" + const: "zoho-invoice" + enum: + - "zoho-invoice" + order: 0 + type: "string" + source-zoho-invoice-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "region" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + organization_id: + type: "string" + description: "To be provided if a user belongs to multiple organizations" + order: 3 + title: "Organization ID" + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + order: 4 + title: "Region" source-breezy-hr: type: "object" required: @@ -39511,6 +43564,95 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-revolut-merchant: + type: "object" + required: + - "api_version" + - "secret_api_key" + - "start_date" + - "environment" + - "sourceType" + properties: + api_version: + type: "string" + description: + "Specify the API version to use. This is required for certain\ + \ API calls. Example: '2024-09-01'." + name: "api_version" + title: "API Version" + order: 0 + secret_api_key: + type: "string" + description: + "Secret API key to use for authenticating with the Revolut\ + \ Merchant API. Find it in your Revolut Business account under APIs >\ + \ Merchant API." 
+ name: "secret_api_key" + title: "Secret API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + environment: + type: "string" + description: "The base url of your environment. Either sandbox or production" + title: "environment" + enum: + - "sandbox-merchant" + - "merchant" + order: 3 + sourceType: + title: "revolut-merchant" + const: "revolut-merchant" + enum: + - "revolut-merchant" + order: 0 + type: "string" + source-revolut-merchant-update: + type: "object" + required: + - "api_version" + - "secret_api_key" + - "start_date" + - "environment" + properties: + api_version: + type: "string" + description: + "Specify the API version to use. This is required for certain\ + \ API calls. Example: '2024-09-01'." + name: "api_version" + title: "API Version" + order: 0 + secret_api_key: + type: "string" + description: + "Secret API key to use for authenticating with the Revolut\ + \ Merchant API. Find it in your Revolut Business account under APIs >\ + \ Merchant API." + name: "secret_api_key" + title: "Secret API Key" + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + environment: + type: "string" + description: "The base url of your environment. Either sandbox or production" + title: "environment" + enum: + - "sandbox-merchant" + - "merchant" + order: 3 source-hibob: type: "object" required: @@ -40162,6 +44304,23 @@ components: \ token." airbyte_secret: true x-speakeasy-param-sensitive: true + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Zendesk Support plan. More\ + \ info about the rate limit plan tiers can be found on Zendesk's API docs." + order: 3 sourceType: title: "zendesk-support" const: "zendesk-support" @@ -40264,6 +44423,23 @@ components: >full documentation for more information on generating this\ \ token." airbyte_secret: true + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Zendesk Support plan. More\ + \ info about the rate limit plan tiers can be found on Zendesk's API docs." + order: 3 source-veeqo: type: "object" required: @@ -40726,6 +44902,45 @@ components: title: "Client Secret" airbyte_secret: true order: 4 + source-spotlercrm: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access Token to authenticate API requests. Generate it by\ + \ logging into your CRM system, navigating to Settings / Integrations\ + \ / API V4, and clicking 'generate new key'." 
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "spotlercrm" + const: "spotlercrm" + enum: + - "spotlercrm" + order: 0 + type: "string" + source-spotlercrm-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + description: + "Access Token to authenticate API requests. Generate it by\ + \ logging into your CRM system, navigating to Settings / Integrations\ + \ / API V4, and clicking 'generate new key'." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true source-ashby: type: "object" required: @@ -40818,6 +45033,48 @@ components: \ Access and select API integration." airbyte_secret: true order: 0 + source-freightview: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "freightview" + const: "freightview" + enum: + - "freightview" + order: 0 + type: "string" + source-freightview-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client Secret" + airbyte_secret: true source-onesignal: type: "object" required: @@ -45515,6 +49772,43 @@ components: pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" order: 1 format: "date-time" + source-pretix: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Obtain it from the pretix web interface\ + \ by creating a new token under your team settings." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "pretix" + const: "pretix" + enum: + - "pretix" + order: 0 + type: "string" + source-pretix-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Obtain it from the pretix web interface\ + \ by creating a new token under your team settings." 
+ name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-cimis: type: "object" required: @@ -46081,6 +50375,61 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-blogger: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "blogger" + const: "blogger" + enum: + - "blogger" + order: 0 + type: "string" + source-blogger-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true source-codefresh: type: "object" required: @@ -47469,6 +51818,92 @@ components: >docs for more information on where to find that key." airbyte_secret: true order: 0 + source-web-scrapper: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://cloud.webscraper.io/api" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "web-scrapper" + const: "web-scrapper" + enum: + - "web-scrapper" + order: 0 + type: "string" + source-web-scrapper-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://cloud.webscraper.io/api" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-wufoo: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your Wufoo API Key. You can find it by logging into your Wufoo\ + \ account, selecting 'API Information' from the 'More' dropdown on any\ + \ form, and locating the 16-digit code." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "Your account subdomain/username for Wufoo." + name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "wufoo" + const: "wufoo" + enum: + - "wufoo" + order: 0 + type: "string" + source-wufoo-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: + "Your Wufoo API Key. You can find it by logging into your Wufoo\ + \ account, selecting 'API Information' from the 'More' dropdown on any\ + \ form, and locating the 16-digit code." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "Your account subdomain/username for Wufoo." + name: "subdomain" + order: 1 + title: "Subdomain" source-configcat: type: "object" required: @@ -47585,6 +52020,181 @@ components: description: "Your Insightly API token." 
airbyte_secret: true order: 1 + source-zoho-desk: + type: "object" + required: + - "client_id" + - "client_secret" + - "token_refresh_endpoint" + - "refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_refresh_endpoint: + type: "string" + name: "token_refresh_endpoint" + order: 2 + title: "Token Refresh Endpoint" + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + include_custom_domain: + type: "boolean" + order: 4 + title: "include Custom Domain" + sourceType: + title: "zoho-desk" + const: "zoho-desk" + enum: + - "zoho-desk" + order: 0 + type: "string" + source-zoho-desk-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "token_refresh_endpoint" + - "refresh_token" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + token_refresh_endpoint: + type: "string" + name: "token_refresh_endpoint" + order: 2 + title: "Token Refresh Endpoint" + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + include_custom_domain: + type: "boolean" + order: 4 + title: "include Custom Domain" + source-pipeliner: + type: "object" + required: + - "username" + - "service" + - "spaceid" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + service: + type: "string" + enum: + - "eu-central" + - "us-east" + - "ca-central" + - "ap-southeast" + order: 2 + title: "Data Center" + spaceid: + type: "string" + order: 3 + title: "Space ID" + sourceType: + title: "pipeliner" + const: "pipeliner" + enum: + - "pipeliner" + order: 0 + type: "string" + source-pipeliner-update: + type: "object" + required: + - "username" + - "service" + - "spaceid" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + service: + type: "string" + enum: + - "eu-central" + - "us-east" + - "ca-central" + - "ap-southeast" + order: 2 + title: "Data Center" + spaceid: + type: "string" + order: 3 + title: "Space ID" + source-opinion-stage: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "opinion-stage" + const: "opinion-stage" + enum: + - "opinion-stage" + order: 0 + type: "string" + source-opinion-stage-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 source-cart: title: "Cart.com Spec" type: "object" @@ -48737,89 +53347,164 @@ components: order: 1 title: "OAuth Client ID" airbyte_secret: true - x-speakeasy-param-sensitive: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + 
x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "zoho-inventory" + const: "zoho-inventory" + enum: + - "zoho-inventory" + order: 0 + type: "string" + source-zoho-inventory-update: + type: "object" + required: + - "domain" + - "client_id" + - "client_secret" + - "refresh_token" + - "start_date" + properties: + domain: + type: "string" + description: + "The domain suffix for the Zoho Inventory API based on your\ + \ data center location (e.g., 'com', 'eu', 'in', etc.)" + enum: + - "com" + - "in" + - "jp" + - "eu" + - "com.au" + - "ca" + - "com.cn" + - "sa" + name: "domain" + order: 0 + title: "Domain" + default: "com" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true client_secret: type: "string" name: "client_secret" order: 2 title: "OAuth Client Secret" airbyte_secret: true - x-speakeasy-param-sensitive: true refresh_token: type: "string" name: "refresh_token" order: 3 title: "OAuth Refresh Token" airbyte_secret: true - x-speakeasy-param-sensitive: true start_date: type: "string" order: 4 title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-salesflare: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Enter you api key like this : Bearer YOUR_API_KEY" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true sourceType: - title: "zoho-inventory" - const: "zoho-inventory" + title: "salesflare" + const: "salesflare" enum: - - "zoho-inventory" + - "salesflare" order: 0 type: "string" - source-zoho-inventory-update: + source-salesflare-update: type: "object" required: - - "domain" - - "client_id" - - "client_secret" - - "refresh_token" + - "api_key" + properties: + api_key: + type: "string" + description: "Enter you api key like this : Bearer YOUR_API_KEY" + order: 0 + title: "API Key" + airbyte_secret: true + source-brevo: + type: "object" + required: + - "api_key" - "start_date" + - "sourceType" properties: - domain: + api_key: type: "string" - description: - "The domain suffix for the Zoho Inventory API based on your\ - \ data center location (e.g., 'com', 'eu', 'in', etc.)" - enum: - - "com" - - "in" - - "jp" - - "eu" - - "com.au" - - "ca" - - "com.cn" - - "sa" - name: "domain" order: 0 - title: "Domain" - default: "com" - client_id: + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: type: "string" - name: "client_id" order: 1 - title: "OAuth Client ID" - airbyte_secret: true - client_secret: + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "brevo" + const: "brevo" + enum: + - "brevo" + order: 0 type: "string" - name: "client_secret" - order: 2 - title: "OAuth Client Secret" - airbyte_secret: true - refresh_token: + source-brevo-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: type: "string" - name: "refresh_token" - order: 3 - title: "OAuth Refresh Token" + order: 0 + title: "API Key" airbyte_secret: true start_date: type: "string" - order: 4 + order: 1 title: "Start date" format: 
"date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - source-brevo: + source-finage: type: "object" required: - "api_key" + - "symbols" - "start_date" - "sourceType" properties: @@ -48829,23 +53514,84 @@ components: title: "API Key" airbyte_secret: true x-speakeasy-param-sensitive: true + symbols: + type: "array" + description: "List of symbols " + order: 1 + title: "Symbols" + tech_indicator_type: + type: "string" + description: "One of DEMA, EMA, SMA, WMA, RSI, TEMA, Williams, ADX " + enum: + - "DEMA" + - "EMA" + - "SMA" + - "WMA" + - "RSI" + - "TEMA" + - "Williams" + - "ADX" + order: 2 + title: "Technical Indicator Type" + default: "SMA" + time: + type: "string" + enum: + - "daily" + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + order: 3 + title: "Time Interval" + default: "daily" + period: + type: "string" + description: "Time period. Default is 10" + order: 4 + title: "Period" + time_aggregates: + type: "string" + description: "Size of the time" + enum: + - "minute" + - "hour" + - "day" + - "week" + - "month" + - "quarter" + - "year" + order: 5 + title: "Time aggregates" + default: "day" + time_period: + type: "string" + description: "Time Period for cash flow stmts" + enum: + - "annual" + - "quarter" + order: 6 + title: "Time Period" start_date: type: "string" - order: 1 title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 7 sourceType: - title: "brevo" - const: "brevo" + title: "finage" + const: "finage" enum: - - "brevo" + - "finage" order: 0 type: "string" - source-brevo-update: + source-finage-update: type: "object" required: - "api_key" + - "symbols" - "start_date" properties: api_key: @@ -48853,12 +53599,72 @@ components: order: 0 title: "API Key" airbyte_secret: true + symbols: + type: "array" + description: "List of symbols " + order: 1 + title: "Symbols" + tech_indicator_type: + type: "string" + description: "One of DEMA, EMA, SMA, WMA, RSI, TEMA, Williams, ADX " + enum: + - "DEMA" + - "EMA" + - "SMA" + - "WMA" + - "RSI" + - "TEMA" + - "Williams" + - "ADX" + order: 2 + title: "Technical Indicator Type" + default: "SMA" + time: + type: "string" + enum: + - "daily" + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + order: 3 + title: "Time Interval" + default: "daily" + period: + type: "string" + description: "Time period. 
Default is 10" + order: 4 + title: "Period" + time_aggregates: + type: "string" + description: "Size of the time" + enum: + - "minute" + - "hour" + - "day" + - "week" + - "month" + - "quarter" + - "year" + order: 5 + title: "Time aggregates" + default: "day" + time_period: + type: "string" + description: "Time Period for cash flow stmts" + enum: + - "annual" + - "quarter" + order: 6 + title: "Time Period" start_date: type: "string" - order: 1 title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 7 source-datascope: type: "object" required: @@ -49494,6 +54300,45 @@ components: - "1day" - "1week" - "1month" + source-smartreach: + type: "object" + required: + - "api_key" + - "teamid" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + teamid: + type: "number" + title: "TeamID" + order: 1 + sourceType: + title: "smartreach" + const: "smartreach" + enum: + - "smartreach" + order: 0 + type: "string" + source-smartreach-update: + type: "object" + required: + - "api_key" + - "teamid" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + teamid: + type: "number" + title: "TeamID" + order: 1 source-monday: title: "Monday Spec" type: "object" @@ -49632,6 +54477,170 @@ components: title: "Personal API Token" description: "API Token for making authenticated requests." airbyte_secret: true + source-onepagecrm: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + description: "Enter the user ID of your API app" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API Key of your API app" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "onepagecrm" + const: "onepagecrm" + enum: + - "onepagecrm" + order: 0 + type: "string" + source-onepagecrm-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + description: "Enter the user ID of your API app" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API Key of your API app" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + source-financial-modelling: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + exchange: + type: "string" + description: + "The stock exchange : AMEX, AMS, AQS, ASX, ATH, BER, BME, BRU,\ + \ BSE, BUD, BUE, BVC, CAI, CBOE, CNQ, CPH, DFM, DOH, DUS, DXE, EGX, EURONEXT,\ + \ HAM, HEL, HKSE, ICE, IOB, IST, JKT, JNB, JPX, KLS, KOE, KSC, KUW, LSE,\ + \ MCX, MEX, MIL, MUN, NASDAQ, NEO, NSE, NYSE, NZE, OEM, OQX, OSL, OTC,\ + \ PNK, PRA, RIS, SAO, SAU, SES, SET, SGO, SHH, SHZ, SIX, STO, STU, TAI,\ + \ TLV, TSX, TSXV, TWO, VIE, VSE, WSE, XETRA" + order: 1 + title: "Exchange" + default: "NASDAQ" + marketcapmorethan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap more\ + \ than the give marketcap" + order: 2 + title: "Market Cap More Than" + marketcaplowerthan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap lower\ + \ than the give marketcap" + order: 3 + title: "Market Cap Lower Than" + time_frame: + type: "string" + description: "For 
example 1min, 5min, 15min, 30min, 1hour, 4hour" + order: 4 + title: "Time Frame" + default: "4hour" + enum: + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + sourceType: + title: "financial-modelling" + const: "financial-modelling" + enum: + - "financial-modelling" + order: 0 + type: "string" + source-financial-modelling-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + exchange: + type: "string" + description: + "The stock exchange : AMEX, AMS, AQS, ASX, ATH, BER, BME, BRU,\ + \ BSE, BUD, BUE, BVC, CAI, CBOE, CNQ, CPH, DFM, DOH, DUS, DXE, EGX, EURONEXT,\ + \ HAM, HEL, HKSE, ICE, IOB, IST, JKT, JNB, JPX, KLS, KOE, KSC, KUW, LSE,\ + \ MCX, MEX, MIL, MUN, NASDAQ, NEO, NSE, NYSE, NZE, OEM, OQX, OSL, OTC,\ + \ PNK, PRA, RIS, SAO, SAU, SES, SET, SGO, SHH, SHZ, SIX, STO, STU, TAI,\ + \ TLV, TSX, TSXV, TWO, VIE, VSE, WSE, XETRA" + order: 1 + title: "Exchange" + default: "NASDAQ" + marketcapmorethan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap more\ + \ than the give marketcap" + order: 2 + title: "Market Cap More Than" + marketcaplowerthan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap lower\ + \ than the give marketcap" + order: 3 + title: "Market Cap Lower Than" + time_frame: + type: "string" + description: "For example 1min, 5min, 15min, 30min, 1hour, 4hour" + order: 4 + title: "Time Frame" + default: "4hour" + enum: + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 source-waiteraid: type: "object" required: @@ -50036,6 +55045,81 @@ components: description: "Identification token for app accessing data" airbyte_secret: true order: 1 + source-invoiceninja: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "invoiceninja" + const: "invoiceninja" + enum: + - "invoiceninja" + order: 0 + type: "string" + source-invoiceninja-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + source-sendpulse: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "sendpulse" + const: "sendpulse" + enum: + - "sendpulse" + order: 0 + type: "string" + source-sendpulse-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true source-bigquery: title: "BigQuery Source Spec" type: "object" @@ 
-50312,6 +55396,39 @@ components: >here." airbyte_secret: true order: 0 + source-tickettailor: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://www.getdrip.com/user/edit" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "tickettailor" + const: "tickettailor" + enum: + - "tickettailor" + order: 0 + type: "string" + source-tickettailor-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://www.getdrip.com/user/edit" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-calendly: type: "object" required: @@ -50448,6 +55565,72 @@ components: type: "string" title: "Engine" description: "Engine name to connect to." + source-eventee: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it at https://admin.eventee.co/\ + \ in 'Settings -> Features'." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "eventee" + const: "eventee" + enum: + - "eventee" + order: 0 + type: "string" + source-eventee-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it at https://admin.eventee.co/\ + \ in 'Settings -> Features'." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-simfin: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "simfin" + const: "simfin" + enum: + - "simfin" + order: 0 + type: "string" + source-simfin-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true source-pivotal-tracker: title: "Pivotal Tracker Spec" type: "object" @@ -50530,6 +55713,127 @@ components: description: "Date from when the sync should start in epoch Unix timestamp" order: 2 title: "Start Date" + source-brex: + type: "object" + required: + - "user_token" + - "start_date" + - "sourceType" + properties: + user_token: + type: "string" + description: + "User token to authenticate API requests. Generate it from\ + \ your Brex dashboard under Developer > Settings." + name: "user_token" + title: "User Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "brex" + const: "brex" + enum: + - "brex" + order: 0 + type: "string" + source-brex-update: + type: "object" + required: + - "user_token" + - "start_date" + properties: + user_token: + type: "string" + description: + "User token to authenticate API requests. Generate it from\ + \ your Brex dashboard under Developer > Settings." 
+ name: "user_token" + title: "User Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-fulcrum: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://web.fulcrumapp.com/settings/api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "fulcrum" + const: "fulcrum" + enum: + - "fulcrum" + order: 0 + type: "string" + source-fulcrum-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://web.fulcrumapp.com/settings/api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-bigmailer: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can create and find it on the API key\ + \ management page in your BigMailer account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "bigmailer" + const: "bigmailer" + enum: + - "bigmailer" + order: 0 + type: "string" + source-bigmailer-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. You can create and find it on the API key\ + \ management page in your BigMailer account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-senseforce: type: "object" required: @@ -52247,6 +57551,21 @@ components: description: "The Client Secret of your Drift developer application." airbyte_secret: true title: null + snapchat-marketing: + properties: + client_id: + type: "string" + description: "The Client ID of your Snapchat developer application." + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret of your Snapchat developer application." + order: 1 + title: "Client Secret" + airbyte_secret: true + title: null gitlab: properties: credentials: @@ -52438,9 +57757,9 @@ components: "The client ID of your Amazon Ads developer application. See\ \ the docs for more information." + airbyte_secret: true order: 1 type: "string" - airbyte_secret: true client_secret: title: "Client Secret" description: @@ -52450,7 +57769,7 @@ components: airbyte_secret: true order: 2 type: "string" - title: "Amazon Ads Spec" + title: "Source Amazon Ads" github: properties: credentials: @@ -52600,20 +57919,6 @@ components: >here." airbyte_secret: true title: "Google Search Console Spec" - retently: - properties: - credentials: - properties: - client_id: - title: "Client ID" - type: "string" - description: "The Client ID of your Retently developer application." - client_secret: - title: "Client Secret" - type: "string" - description: "The Client Secret of your Retently developer application." - airbyte_secret: true - title: "Retently Api Spec" instagram: properties: client_id: @@ -52629,21 +57934,6 @@ components: airbyte_hidden: true type: "string" title: "Source Instagram" - zendesk-sunshine: - properties: - credentials: - properties: - client_id: - type: "string" - title: "Client ID" - description: "The Client ID of your OAuth application." 
- airbyte_secret: true - client_secret: - type: "string" - title: "Client Secret" - description: "The Client Secret of your OAuth application." - airbyte_secret: true - title: null snowflake: properties: credentials: @@ -52978,8 +58268,6 @@ components: $ref: "#/components/schemas/pinterest" - title: rd-station-marketing $ref: "#/components/schemas/rd-station-marketing" - - title: retently - $ref: "#/components/schemas/retently" - title: salesforce $ref: "#/components/schemas/salesforce" - title: shopify @@ -52988,6 +58276,8 @@ components: $ref: "#/components/schemas/slack" - title: smartsheets $ref: "#/components/schemas/smartsheets" + - title: snapchat-marketing + $ref: "#/components/schemas/snapchat-marketing" - title: snowflake $ref: "#/components/schemas/snowflake" - title: surveymonkey @@ -53002,8 +58292,6 @@ components: $ref: "#/components/schemas/youtube-analytics" - title: zendesk-chat $ref: "#/components/schemas/zendesk-chat" - - title: zendesk-sunshine - $ref: "#/components/schemas/zendesk-sunshine" - title: zendesk-support $ref: "#/components/schemas/zendesk-support" - title: zendesk-talk @@ -53018,12 +58306,16 @@ components: $ref: "#/components/schemas/source-7shifts" - title: source-activecampaign $ref: "#/components/schemas/source-activecampaign" + - title: source-agilecrm + $ref: "#/components/schemas/source-agilecrm" - title: source-airbyte $ref: "#/components/schemas/source-airbyte" - title: source-aircall $ref: "#/components/schemas/source-aircall" - title: source-airtable $ref: "#/components/schemas/source-airtable" + - title: source-akeneo + $ref: "#/components/schemas/source-akeneo" - title: source-algolia $ref: "#/components/schemas/source-algolia" - title: source-amazon-ads @@ -53044,6 +58336,8 @@ components: $ref: "#/components/schemas/source-appfollow" - title: source-apple-search-ads $ref: "#/components/schemas/source-apple-search-ads" + - title: source-apptivo + $ref: "#/components/schemas/source-apptivo" - title: source-asana $ref: "#/components/schemas/source-asana" - title: source-ashby @@ -53062,12 +58356,18 @@ components: $ref: "#/components/schemas/source-basecamp" - title: source-beamer $ref: "#/components/schemas/source-beamer" + - title: source-bigmailer + $ref: "#/components/schemas/source-bigmailer" - title: source-bigquery $ref: "#/components/schemas/source-bigquery" - title: source-bing-ads $ref: "#/components/schemas/source-bing-ads" - title: source-bitly $ref: "#/components/schemas/source-bitly" + - title: source-blogger + $ref: "#/components/schemas/source-blogger" + - title: source-box + $ref: "#/components/schemas/source-box" - title: source-braintree $ref: "#/components/schemas/source-braintree" - title: source-braze @@ -53076,20 +58376,30 @@ components: $ref: "#/components/schemas/source-breezy-hr" - title: source-brevo $ref: "#/components/schemas/source-brevo" + - title: source-brex + $ref: "#/components/schemas/source-brex" - title: source-bugsnag $ref: "#/components/schemas/source-bugsnag" - title: source-buildkite $ref: "#/components/schemas/source-buildkite" + - title: source-bunny-inc + $ref: "#/components/schemas/source-bunny-inc" - title: source-buzzsprout $ref: "#/components/schemas/source-buzzsprout" + - title: source-cal-com + $ref: "#/components/schemas/source-cal-com" - title: source-calendly $ref: "#/components/schemas/source-calendly" - title: source-callrail $ref: "#/components/schemas/source-callrail" - title: source-campaign-monitor $ref: "#/components/schemas/source-campaign-monitor" + - title: source-campayn + $ref: 
"#/components/schemas/source-campayn" - title: source-canny $ref: "#/components/schemas/source-canny" + - title: source-capsule-crm + $ref: "#/components/schemas/source-capsule-crm" - title: source-care-quality-commission $ref: "#/components/schemas/source-care-quality-commission" - title: source-cart @@ -53108,6 +58418,8 @@ components: $ref: "#/components/schemas/source-chartmogul" - title: source-cimis $ref: "#/components/schemas/source-cimis" + - title: source-cin7 + $ref: "#/components/schemas/source-cin7" - title: source-circa $ref: "#/components/schemas/source-circa" - title: source-circleci @@ -53122,8 +58434,12 @@ components: $ref: "#/components/schemas/source-clickup-api" - title: source-clockify $ref: "#/components/schemas/source-clockify" + - title: source-clockodo + $ref: "#/components/schemas/source-clockodo" - title: source-close-com $ref: "#/components/schemas/source-close-com" + - title: source-cloudbeds + $ref: "#/components/schemas/source-cloudbeds" - title: source-coassemble $ref: "#/components/schemas/source-coassemble" - title: source-coda @@ -53158,6 +58474,8 @@ components: $ref: "#/components/schemas/source-dbt" - title: source-delighted $ref: "#/components/schemas/source-delighted" + - title: source-deputy + $ref: "#/components/schemas/source-deputy" - title: source-dixa $ref: "#/components/schemas/source-dixa" - title: source-dockerhub @@ -53172,16 +58490,26 @@ components: $ref: "#/components/schemas/source-dropbox-sign" - title: source-dynamodb $ref: "#/components/schemas/source-dynamodb" + - title: source-e-conomic + $ref: "#/components/schemas/source-e-conomic" - title: source-easypost $ref: "#/components/schemas/source-easypost" - title: source-easypromos $ref: "#/components/schemas/source-easypromos" + - title: source-elasticemail + $ref: "#/components/schemas/source-elasticemail" - title: source-emailoctopus $ref: "#/components/schemas/source-emailoctopus" - title: source-employment-hero $ref: "#/components/schemas/source-employment-hero" + - title: source-encharge + $ref: "#/components/schemas/source-encharge" - title: source-eventbrite $ref: "#/components/schemas/source-eventbrite" + - title: source-eventee + $ref: "#/components/schemas/source-eventee" + - title: source-eventzilla + $ref: "#/components/schemas/source-eventzilla" - title: source-exchange-rates $ref: "#/components/schemas/source-exchange-rates" - title: source-ezofficeinventory @@ -53196,16 +58524,38 @@ components: $ref: "#/components/schemas/source-fauna" - title: source-file $ref: "#/components/schemas/source-file" + - title: source-fillout + $ref: "#/components/schemas/source-fillout" + - title: source-finage + $ref: "#/components/schemas/source-finage" + - title: source-financial-modelling + $ref: "#/components/schemas/source-financial-modelling" + - title: source-finnhub + $ref: "#/components/schemas/source-finnhub" + - title: source-finnworlds + $ref: "#/components/schemas/source-finnworlds" - title: source-firebolt $ref: "#/components/schemas/source-firebolt" + - title: source-firehydrant + $ref: "#/components/schemas/source-firehydrant" - title: source-fleetio $ref: "#/components/schemas/source-fleetio" + - title: source-flexmail + $ref: "#/components/schemas/source-flexmail" - title: source-flexport $ref: "#/components/schemas/source-flexport" - title: source-float $ref: "#/components/schemas/source-float" + - title: source-flowlu + $ref: "#/components/schemas/source-flowlu" + - title: source-formbricks + $ref: "#/components/schemas/source-formbricks" - title: 
source-free-agent-connector $ref: "#/components/schemas/source-free-agent-connector" + - title: source-freightview + $ref: "#/components/schemas/source-freightview" + - title: source-freshbooks + $ref: "#/components/schemas/source-freshbooks" - title: source-freshcaller $ref: "#/components/schemas/source-freshcaller" - title: source-freshchat @@ -53218,12 +58568,18 @@ components: $ref: "#/components/schemas/source-freshservice" - title: source-front $ref: "#/components/schemas/source-front" + - title: source-fulcrum + $ref: "#/components/schemas/source-fulcrum" - title: source-gainsight-px $ref: "#/components/schemas/source-gainsight-px" - title: source-gcs $ref: "#/components/schemas/source-gcs" + - title: source-getgist + $ref: "#/components/schemas/source-getgist" - title: source-getlago $ref: "#/components/schemas/source-getlago" + - title: source-gitbook + $ref: "#/components/schemas/source-gitbook" - title: source-github $ref: "#/components/schemas/source-github" - title: source-gitlab @@ -53246,10 +58602,14 @@ components: $ref: "#/components/schemas/source-google-analytics-data-api" - title: source-google-calendar $ref: "#/components/schemas/source-google-calendar" + - title: source-google-classroom + $ref: "#/components/schemas/source-google-classroom" - title: source-google-directory $ref: "#/components/schemas/source-google-directory" - title: source-google-drive $ref: "#/components/schemas/source-google-drive" + - title: source-google-forms + $ref: "#/components/schemas/source-google-forms" - title: source-google-pagespeed-insights $ref: "#/components/schemas/source-google-pagespeed-insights" - title: source-google-search-console @@ -53284,10 +58644,14 @@ components: $ref: "#/components/schemas/source-hubplanner" - title: source-hubspot $ref: "#/components/schemas/source-hubspot" + - title: source-humanitix + $ref: "#/components/schemas/source-humanitix" - title: source-illumina-basespace $ref: "#/components/schemas/source-illumina-basespace" - title: source-incident-io $ref: "#/components/schemas/source-incident-io" + - title: source-inflowinventory + $ref: "#/components/schemas/source-inflowinventory" - title: source-insightly $ref: "#/components/schemas/source-insightly" - title: source-instagram @@ -53298,12 +58662,16 @@ components: $ref: "#/components/schemas/source-intercom" - title: source-invoiced $ref: "#/components/schemas/source-invoiced" + - title: source-invoiceninja + $ref: "#/components/schemas/source-invoiceninja" - title: source-ip2whois $ref: "#/components/schemas/source-ip2whois" - title: source-iterable $ref: "#/components/schemas/source-iterable" - title: source-jira $ref: "#/components/schemas/source-jira" + - title: source-jobnimbus + $ref: "#/components/schemas/source-jobnimbus" - title: source-jotform $ref: "#/components/schemas/source-jotform" - title: source-just-sift @@ -53332,8 +58700,12 @@ components: $ref: "#/components/schemas/source-leadfeeder" - title: source-lemlist $ref: "#/components/schemas/source-lemlist" + - title: source-less-annoying-crm + $ref: "#/components/schemas/source-less-annoying-crm" - title: source-lever-hiring $ref: "#/components/schemas/source-lever-hiring" + - title: source-lightspeed-retail + $ref: "#/components/schemas/source-lightspeed-retail" - title: source-linkedin-ads $ref: "#/components/schemas/source-linkedin-ads" - title: source-linkedin-pages @@ -53358,8 +58730,14 @@ components: $ref: "#/components/schemas/source-mailjet-mail" - title: source-mailjet-sms $ref: "#/components/schemas/source-mailjet-sms" + - title: 
source-mailosaur + $ref: "#/components/schemas/source-mailosaur" + - title: source-mailtrap + $ref: "#/components/schemas/source-mailtrap" - title: source-marketo $ref: "#/components/schemas/source-marketo" + - title: source-marketstack + $ref: "#/components/schemas/source-marketstack" - title: source-mention $ref: "#/components/schemas/source-mention" - title: source-metabase @@ -53406,10 +58784,16 @@ components: $ref: "#/components/schemas/source-netsuite" - title: source-news-api $ref: "#/components/schemas/source-news-api" + - title: source-newsdata-io + $ref: "#/components/schemas/source-newsdata-io" + - title: source-nocrm + $ref: "#/components/schemas/source-nocrm" - title: source-northpass-lms $ref: "#/components/schemas/source-northpass-lms" - title: source-notion $ref: "#/components/schemas/source-notion" + - title: source-nutshell + $ref: "#/components/schemas/source-nutshell" - title: source-nylas $ref: "#/components/schemas/source-nylas" - title: source-nytimes @@ -53418,12 +58802,24 @@ components: $ref: "#/components/schemas/source-okta" - title: source-omnisend $ref: "#/components/schemas/source-omnisend" + - title: source-oncehub + $ref: "#/components/schemas/source-oncehub" + - title: source-onepagecrm + $ref: "#/components/schemas/source-onepagecrm" - title: source-onesignal $ref: "#/components/schemas/source-onesignal" + - title: source-onfleet + $ref: "#/components/schemas/source-onfleet" - title: source-open-data-dc $ref: "#/components/schemas/source-open-data-dc" + - title: source-openaq + $ref: "#/components/schemas/source-openaq" + - title: source-openfda + $ref: "#/components/schemas/source-openfda" - title: source-openweather $ref: "#/components/schemas/source-openweather" + - title: source-opinion-stage + $ref: "#/components/schemas/source-opinion-stage" - title: source-opsgenie $ref: "#/components/schemas/source-opsgenie" - title: source-oracle @@ -53438,8 +58834,16 @@ components: $ref: "#/components/schemas/source-outbrain-amplify" - title: source-outreach $ref: "#/components/schemas/source-outreach" + - title: source-oveit + $ref: "#/components/schemas/source-oveit" + - title: source-pabbly-subscriptions-billing + $ref: "#/components/schemas/source-pabbly-subscriptions-billing" - title: source-pandadoc $ref: "#/components/schemas/source-pandadoc" + - title: source-paperform + $ref: "#/components/schemas/source-paperform" + - title: source-papersign + $ref: "#/components/schemas/source-papersign" - title: source-pardot $ref: "#/components/schemas/source-pardot" - title: source-paypal-transaction @@ -53462,6 +58866,8 @@ components: $ref: "#/components/schemas/source-pinterest" - title: source-pipedrive $ref: "#/components/schemas/source-pipedrive" + - title: source-pipeliner + $ref: "#/components/schemas/source-pipeliner" - title: source-pivotal-tracker $ref: "#/components/schemas/source-pivotal-tracker" - title: source-piwik @@ -53486,6 +58892,8 @@ components: $ref: "#/components/schemas/source-postmarkapp" - title: source-prestashop $ref: "#/components/schemas/source-prestashop" + - title: source-pretix + $ref: "#/components/schemas/source-pretix" - title: source-primetric $ref: "#/components/schemas/source-primetric" - title: source-productboard @@ -53518,26 +58926,36 @@ components: $ref: "#/components/schemas/source-referralhero" - title: source-rentcast $ref: "#/components/schemas/source-rentcast" + - title: source-repairshopr + $ref: "#/components/schemas/source-repairshopr" - title: source-reply-io $ref: "#/components/schemas/source-reply-io" - title: 
source-retently $ref: "#/components/schemas/source-retently" - title: source-revenuecat $ref: "#/components/schemas/source-revenuecat" + - title: source-revolut-merchant + $ref: "#/components/schemas/source-revolut-merchant" - title: source-rki-covid $ref: "#/components/schemas/source-rki-covid" + - title: source-rocketlane + $ref: "#/components/schemas/source-rocketlane" - title: source-rollbar $ref: "#/components/schemas/source-rollbar" - title: source-rootly $ref: "#/components/schemas/source-rootly" - title: source-rss $ref: "#/components/schemas/source-rss" + - title: source-ruddr + $ref: "#/components/schemas/source-ruddr" - title: source-s3 $ref: "#/components/schemas/source-s3" - title: source-safetyculture $ref: "#/components/schemas/source-safetyculture" - title: source-sage-hr $ref: "#/components/schemas/source-sage-hr" + - title: source-salesflare + $ref: "#/components/schemas/source-salesflare" - title: source-salesforce $ref: "#/components/schemas/source-salesforce" - title: source-salesloft @@ -53556,6 +58974,10 @@ components: $ref: "#/components/schemas/source-sendgrid" - title: source-sendinblue $ref: "#/components/schemas/source-sendinblue" + - title: source-sendowl + $ref: "#/components/schemas/source-sendowl" + - title: source-sendpulse + $ref: "#/components/schemas/source-sendpulse" - title: source-senseforce $ref: "#/components/schemas/source-senseforce" - title: source-sentry @@ -53566,6 +58988,8 @@ components: $ref: "#/components/schemas/source-sftp-bulk" - title: source-sharetribe $ref: "#/components/schemas/source-sharetribe" + - title: source-shippo + $ref: "#/components/schemas/source-shippo" - title: source-shopify $ref: "#/components/schemas/source-shopify" - title: source-shortcut @@ -53574,6 +58998,8 @@ components: $ref: "#/components/schemas/source-shortio" - title: source-sigma-computing $ref: "#/components/schemas/source-sigma-computing" + - title: source-simfin + $ref: "#/components/schemas/source-simfin" - title: source-simplecast $ref: "#/components/schemas/source-simplecast" - title: source-simplesat @@ -53584,6 +59010,8 @@ components: $ref: "#/components/schemas/source-smaily" - title: source-smartengage $ref: "#/components/schemas/source-smartengage" + - title: source-smartreach + $ref: "#/components/schemas/source-smartreach" - title: source-smartsheets $ref: "#/components/schemas/source-smartsheets" - title: source-smartwaiver @@ -53602,6 +59030,8 @@ components: $ref: "#/components/schemas/source-sparkpost" - title: source-split-io $ref: "#/components/schemas/source-split-io" + - title: source-spotlercrm + $ref: "#/components/schemas/source-spotlercrm" - title: source-square $ref: "#/components/schemas/source-square" - title: source-squarespace @@ -53610,6 +59040,8 @@ components: $ref: "#/components/schemas/source-statsig" - title: source-statuspage $ref: "#/components/schemas/source-statuspage" + - title: source-stockdata + $ref: "#/components/schemas/source-stockdata" - title: source-strava $ref: "#/components/schemas/source-strava" - title: source-stripe @@ -53620,6 +59052,10 @@ components: $ref: "#/components/schemas/source-surveymonkey" - title: source-survicate $ref: "#/components/schemas/source-survicate" + - title: source-systeme + $ref: "#/components/schemas/source-systeme" + - title: source-taboola + $ref: "#/components/schemas/source-taboola" - title: source-teamtailor $ref: "#/components/schemas/source-teamtailor" - title: source-teamwork @@ -53634,14 +59070,22 @@ components: $ref: "#/components/schemas/source-thinkific" - title: 
source-ticketmaster $ref: "#/components/schemas/source-ticketmaster" + - title: source-tickettailor + $ref: "#/components/schemas/source-tickettailor" - title: source-tiktok-marketing $ref: "#/components/schemas/source-tiktok-marketing" - title: source-timely $ref: "#/components/schemas/source-timely" + - title: source-tinyemail + $ref: "#/components/schemas/source-tinyemail" - title: source-todoist $ref: "#/components/schemas/source-todoist" + - title: source-track-pms + $ref: "#/components/schemas/source-track-pms" - title: source-trello $ref: "#/components/schemas/source-trello" + - title: source-tremendous + $ref: "#/components/schemas/source-tremendous" - title: source-trustpilot $ref: "#/components/schemas/source-trustpilot" - title: source-tvmaze-schedule @@ -53656,6 +59100,8 @@ components: $ref: "#/components/schemas/source-twitter" - title: source-typeform $ref: "#/components/schemas/source-typeform" + - title: source-ubidots + $ref: "#/components/schemas/source-ubidots" - title: source-unleash $ref: "#/components/schemas/source-unleash" - title: source-uppromote @@ -53668,6 +59114,8 @@ components: $ref: "#/components/schemas/source-vantage" - title: source-veeqo $ref: "#/components/schemas/source-veeqo" + - title: source-vercel + $ref: "#/components/schemas/source-vercel" - title: source-visma-economic $ref: "#/components/schemas/source-visma-economic" - title: source-vitally @@ -53680,6 +59128,8 @@ components: $ref: "#/components/schemas/source-wasabi-stats-api" - title: source-weatherstack $ref: "#/components/schemas/source-weatherstack" + - title: source-web-scrapper + $ref: "#/components/schemas/source-web-scrapper" - title: source-webflow $ref: "#/components/schemas/source-webflow" - title: source-when-i-work @@ -53700,6 +59150,8 @@ components: $ref: "#/components/schemas/source-workramp" - title: source-wrike $ref: "#/components/schemas/source-wrike" + - title: source-wufoo + $ref: "#/components/schemas/source-wufoo" - title: source-xkcd $ref: "#/components/schemas/source-xkcd" - title: source-xsolla @@ -53714,6 +59166,8 @@ components: $ref: "#/components/schemas/source-you-need-a-budget-ynab" - title: source-youtube-analytics $ref: "#/components/schemas/source-youtube-analytics" + - title: source-youtube-data + $ref: "#/components/schemas/source-youtube-data" - title: source-zapier-supported-storage $ref: "#/components/schemas/source-zapier-supported-storage" - title: source-zendesk-chat @@ -53728,12 +59182,28 @@ components: $ref: "#/components/schemas/source-zenefits" - title: source-zenloop $ref: "#/components/schemas/source-zenloop" + - title: source-zoho-analytics-metadata-api + $ref: "#/components/schemas/source-zoho-analytics-metadata-api" + - title: source-zoho-bigin + $ref: "#/components/schemas/source-zoho-bigin" + - title: source-zoho-billing + $ref: "#/components/schemas/source-zoho-billing" - title: source-zoho-books $ref: "#/components/schemas/source-zoho-books" + - title: source-zoho-campaign + $ref: "#/components/schemas/source-zoho-campaign" - title: source-zoho-crm $ref: "#/components/schemas/source-zoho-crm" + - title: source-zoho-desk + $ref: "#/components/schemas/source-zoho-desk" + - title: source-zoho-expense + $ref: "#/components/schemas/source-zoho-expense" - title: source-zoho-inventory $ref: "#/components/schemas/source-zoho-inventory" + - title: source-zoho-invoice + $ref: "#/components/schemas/source-zoho-invoice" + - title: source-zonka-feedback + $ref: "#/components/schemas/source-zonka-feedback" - title: source-zoom $ref: 
"#/components/schemas/source-zoom" DestinationConfiguration: @@ -53812,10 +59282,10 @@ components: - notion - pinterest - rd-station-marketing - - retently - salesforce - slack - smartsheets + - snapchat-marketing - snowflake - surveymonkey - tiktok-marketing @@ -53823,7 +59293,6 @@ components: - typeform - youtube-analytics - zendesk-chat - - zendesk-sunshine - zendesk-support - zendesk-talk securitySchemes: diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_streams.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_streams.yaml index eb575de9892..8149e47684d 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_streams.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_streams.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Streams" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -149,6 +149,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -508,6 +509,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -539,6 +541,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -656,6 +661,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -672,6 +678,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -690,6 +699,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -706,6 +716,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_users.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_users.yaml index f7a676a5b57..87ae7cfea37 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_users.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_users.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Users" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -147,6 +147,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -506,6 +507,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -537,6 +539,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -654,6 +659,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -670,6 +676,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -688,6 +697,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -704,6 +714,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_workspaces.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_workspaces.yaml index 62f35dd9030..72a8d49ef51 100644 --- a/airbyte-api/server-api/src/main/openapi/api_documentation_workspaces.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_workspaces.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "Workspaces" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -319,6 +319,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -678,6 +679,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -709,6 +711,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -826,6 +831,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -842,6 +848,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -860,6 +869,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -876,6 +886,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -1426,6 +1439,21 @@ components: description: "The Client Secret of your Drift developer application." airbyte_secret: true title: null + snapchat-marketing: + properties: + client_id: + type: "string" + description: "The Client ID of your Snapchat developer application." + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret of your Snapchat developer application." + order: 1 + title: "Client Secret" + airbyte_secret: true + title: null gitlab: properties: credentials: @@ -1617,9 +1645,9 @@ components: "The client ID of your Amazon Ads developer application. See\ \ the docs for more information." + airbyte_secret: true order: 1 type: "string" - airbyte_secret: true client_secret: title: "Client Secret" description: @@ -1629,7 +1657,7 @@ components: airbyte_secret: true order: 2 type: "string" - title: "Amazon Ads Spec" + title: "Source Amazon Ads" github: properties: credentials: @@ -1779,20 +1807,6 @@ components: >here." airbyte_secret: true title: "Google Search Console Spec" - retently: - properties: - credentials: - properties: - client_id: - title: "Client ID" - type: "string" - description: "The Client ID of your Retently developer application." - client_secret: - title: "Client Secret" - type: "string" - description: "The Client Secret of your Retently developer application." - airbyte_secret: true - title: "Retently Api Spec" instagram: properties: client_id: @@ -1808,21 +1822,6 @@ components: airbyte_hidden: true type: "string" title: "Source Instagram" - zendesk-sunshine: - properties: - credentials: - properties: - client_id: - type: "string" - title: "Client ID" - description: "The Client ID of your OAuth application." - airbyte_secret: true - client_secret: - type: "string" - title: "Client Secret" - description: "The Client Secret of your OAuth application." 
- airbyte_secret: true - title: null snowflake: properties: credentials: @@ -2157,8 +2156,6 @@ components: $ref: "#/components/schemas/pinterest" - title: rd-station-marketing $ref: "#/components/schemas/rd-station-marketing" - - title: retently - $ref: "#/components/schemas/retently" - title: salesforce $ref: "#/components/schemas/salesforce" - title: shopify @@ -2167,6 +2164,8 @@ components: $ref: "#/components/schemas/slack" - title: smartsheets $ref: "#/components/schemas/smartsheets" + - title: snapchat-marketing + $ref: "#/components/schemas/snapchat-marketing" - title: snowflake $ref: "#/components/schemas/snowflake" - title: surveymonkey @@ -2181,8 +2180,6 @@ components: $ref: "#/components/schemas/youtube-analytics" - title: zendesk-chat $ref: "#/components/schemas/zendesk-chat" - - title: zendesk-sunshine - $ref: "#/components/schemas/zendesk-sunshine" - title: zendesk-support $ref: "#/components/schemas/zendesk-support" - title: zendesk-talk @@ -2266,10 +2263,10 @@ components: - notion - pinterest - rd-station-marketing - - retently - salesforce - slack - smartsheets + - snapchat-marketing - snowflake - surveymonkey - tiktok-marketing @@ -2277,7 +2274,6 @@ components: - typeform - youtube-analytics - zendesk-chat - - zendesk-sunshine - zendesk-support - zendesk-talk securitySchemes: diff --git a/airbyte-api/server-api/src/main/openapi/api_sdk.yaml b/airbyte-api/server-api/src/main/openapi/api_sdk.yaml index ac3dcac170c..36cb787a1f1 100644 --- a/airbyte-api/server-api/src/main/openapi/api_sdk.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_sdk.yaml @@ -3,7 +3,7 @@ openapi: "3.1.0" info: title: "airbyte-api" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
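The workspace OAuth-credential schemas in the preceding hunks add a snapchat-marketing entry (client_id / client_secret) and drop retently and zendesk-sunshine from both the connector oneOf list and the actor-type enum. Below is a minimal sketch of a payload shaped like the new snapchat-marketing credentials object; the wrapper field names (actorType, name, configuration) and the endpoint that would accept it are assumptions based on the rest of this spec, and the secret values are placeholders.

# Illustrative only. The wrapper shape and target endpoint are assumptions;
# client_id / client_secret are placeholders, not real credentials.
import json

snapchat_oauth_credentials = {
    "actorType": "source",         # assumed: credentials are scoped per actor type
    "name": "snapchat-marketing",  # matches the enum entry added in this diff
    "configuration": {
        "client_id": "<your-snapchat-client-id>",
        "client_secret": "<your-snapchat-client-secret>",
    },
}

# This object would be sent to the workspace OAuth-override endpoint defined
# elsewhere in the spec (path not shown here).
print(json.dumps(snapchat_oauth_credentials, indent=2))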
servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -1548,6 +1548,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -1907,6 +1908,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -1938,6 +1940,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: "Connection" x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -2055,6 +2060,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -2071,6 +2077,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -2089,6 +2098,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -2105,6 +2115,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -2641,6 +2654,71 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-google-forms: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "form_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + form_id: + type: "array" + order: 3 + title: "Form IDs" + sourceType: + title: "google-forms" + const: "google-forms" + enum: + - "google-forms" + order: 0 + type: "string" + source-google-forms-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "form_id" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + form_id: + type: "array" + order: 3 + title: "Form IDs" source-the-guardian-api: title: "The Guardian Api Spec" type: "object" @@ -3039,6 +3117,65 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-google-classroom: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + title: "OAuth Client ID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + title: "OAuth Client Secret" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + 
sourceType: + title: "google-classroom" + const: "google-classroom" + enum: + - "google-classroom" + order: 0 + type: "string" + source-google-classroom-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + properties: + client_id: + type: "string" + name: "client_id" + title: "OAuth Client ID" + airbyte_secret: true + order: 0 + client_secret: + type: "string" + name: "client_secret" + title: "OAuth Client Secret" + airbyte_secret: true + order: 1 + client_refresh_token: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 2 source-klaviyo: title: "Klaviyo Spec" type: "object" @@ -3075,6 +3212,23 @@ components: \ the \"predictive_analytics\" column from being populated in your downstream\ \ destination." order: 2 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 3 sourceType: title: "klaviyo" const: "klaviyo" @@ -3120,6 +3274,23 @@ components: \ the \"predictive_analytics\" column from being populated in your downstream\ \ destination." order: 2 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 3 required: - "api_key" source-datadog: @@ -3413,7 +3584,6 @@ components: title: "API Key" airbyte_secret: true source-quickbooks: - title: "Source QuickBooks Spec" type: "object" required: - "credentials" @@ -3422,9 +3592,7 @@ components: - "sourceType" properties: credentials: - title: "Authorization Method" type: "object" - order: 0 oneOf: - type: "object" title: "OAuth2.0" @@ -3436,6 +3604,14 @@ components: - "token_expiry_date" - "realm_id" properties: + realm_id: + type: "string" + title: "Realm ID" + description: + "Labeled Company ID. The Make API Calls panel is populated\ + \ with the realm id and the current access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true auth_type: type: "string" const: "oauth2.0" @@ -3449,58 +3625,52 @@ components: \ value from the Keys tab on the app profile via My Apps on the\ \ developer site. There are two versions of this key: development\ \ and production." + access_token: + type: "string" + title: "Access Token" + description: "Access token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true client_secret: + type: "string" + title: "Client Secret" description: " Obtain this value from the Keys tab on the app profile\ \ via My Apps on the developer site. There are two versions of this\ \ key: development and production." - title: "Client Secret" - type: "string" airbyte_secret: true x-speakeasy-param-sensitive: true refresh_token: - description: "A token used when refreshing the access token." - title: "Refresh Token" - type: "string" - airbyte_secret: true - x-speakeasy-param-sensitive: true - access_token: - description: "Access token for making authenticated requests." 
- title: "Access Token" type: "string" + title: "Refresh Token" + description: "A token used when refreshing the access token." airbyte_secret: true x-speakeasy-param-sensitive: true token_expiry_date: type: "string" title: "Token Expiry Date" - description: "The date-time when the access token should be refreshed." format: "date-time" - realm_id: - description: - "Labeled Company ID. The Make API Calls panel is populated\ - \ with the realm id and the current access token." - title: "Realm ID" - type: "string" - airbyte_secret: true - x-speakeasy-param-sensitive: true + description: "The date-time when the access token should be refreshed." + order: 0 + title: "Authorization Method" start_date: + type: "string" order: 1 - description: - "The default value to use if no bookmark exists for an endpoint\ - \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ - \ date will not be replicated." title: "Start Date" - type: "string" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-03-20T00:00:00Z" + description: + "The default value to use if no bookmark exists for an endpoint\ + \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ + \ date will not be replicated." sandbox: + type: "boolean" order: 2 - description: "Determines whether to use the sandbox or production environment." title: "Sandbox" - type: "boolean" default: false + description: "Determines whether to use the sandbox or production environment." sourceType: title: "quickbooks" const: "quickbooks" @@ -3509,7 +3679,6 @@ components: order: 0 type: "string" source-quickbooks-update: - title: "Source QuickBooks Spec" type: "object" required: - "credentials" @@ -3517,9 +3686,7 @@ components: - "sandbox" properties: credentials: - title: "Authorization Method" type: "object" - order: 0 oneOf: - type: "object" title: "OAuth2.0" @@ -3531,6 +3698,13 @@ components: - "token_expiry_date" - "realm_id" properties: + realm_id: + type: "string" + title: "Realm ID" + description: + "Labeled Company ID. The Make API Calls panel is populated\ + \ with the realm id and the current access token." + airbyte_secret: true auth_type: type: "string" const: "oauth2.0" @@ -3544,54 +3718,94 @@ components: \ value from the Keys tab on the app profile via My Apps on the\ \ developer site. There are two versions of this key: development\ \ and production." + access_token: + type: "string" + title: "Access Token" + description: "Access token for making authenticated requests." + airbyte_secret: true client_secret: + type: "string" + title: "Client Secret" description: " Obtain this value from the Keys tab on the app profile\ \ via My Apps on the developer site. There are two versions of this\ \ key: development and production." - title: "Client Secret" - type: "string" airbyte_secret: true refresh_token: - description: "A token used when refreshing the access token." - title: "Refresh Token" - type: "string" - airbyte_secret: true - access_token: - description: "Access token for making authenticated requests." - title: "Access Token" type: "string" + title: "Refresh Token" + description: "A token used when refreshing the access token." airbyte_secret: true token_expiry_date: type: "string" title: "Token Expiry Date" - description: "The date-time when the access token should be refreshed." format: "date-time" - realm_id: - description: - "Labeled Company ID. The Make API Calls panel is populated\ - \ with the realm id and the current access token." 
- title: "Realm ID" - type: "string" - airbyte_secret: true + description: "The date-time when the access token should be refreshed." + order: 0 + title: "Authorization Method" start_date: + type: "string" order: 1 - description: - "The default value to use if no bookmark exists for an endpoint\ - \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ - \ date will not be replicated." title: "Start Date" - type: "string" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-03-20T00:00:00Z" + description: + "The default value to use if no bookmark exists for an endpoint\ + \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ + \ date will not be replicated." sandbox: + type: "boolean" order: 2 - description: "Determines whether to use the sandbox or production environment." title: "Sandbox" - type: "boolean" default: false + description: "Determines whether to use the sandbox or production environment." + source-flowlu: + type: "object" + required: + - "api_key" + - "company" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + company: + type: "string" + name: "company" + order: 1 + title: "Company" + sourceType: + title: "flowlu" + const: "flowlu" + enum: + - "flowlu" + order: 0 + type: "string" + source-flowlu-update: + type: "object" + required: + - "api_key" + - "company" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + company: + type: "string" + name: "company" + order: 1 + title: "Company" source-beamer: type: "object" required: @@ -4187,6 +4401,49 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-mailosaur: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + description: 'Enter "api" here' + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your api key here" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "mailosaur" + const: "mailosaur" + enum: + - "mailosaur" + order: 0 + type: "string" + source-mailosaur-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + description: 'Enter "api" here' + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your api key here" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true source-buildkite: type: "object" required: @@ -4891,6 +5148,77 @@ components: examples: - "https://api.kaon.kyve.network/" - "https://api.korellia.kyve.network/" + source-capsule-crm: + type: "object" + required: + - "bearer_token" + - "start_date" + - "entity" + - "sourceType" + properties: + bearer_token: + type: "string" + description: + "Bearer token to authenticate API requests. Generate it from\ + \ the 'My Preferences' > 'API Authentication Tokens' page in your Capsule\ + \ account." 
+ name: "bearer_token" + order: 0 + title: "Bearer Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + entity: + type: "string" + title: "Entity" + enum: + - "parties" + - "opportunities" + - "kases" + order: 2 + sourceType: + title: "capsule-crm" + const: "capsule-crm" + enum: + - "capsule-crm" + order: 0 + type: "string" + source-capsule-crm-update: + type: "object" + required: + - "bearer_token" + - "start_date" + - "entity" + properties: + bearer_token: + type: "string" + description: + "Bearer token to authenticate API requests. Generate it from\ + \ the 'My Preferences' > 'API Authentication Tokens' page in your Capsule\ + \ account." + name: "bearer_token" + order: 0 + title: "Bearer Token" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + entity: + type: "string" + title: "Entity" + enum: + - "parties" + - "opportunities" + - "kases" + order: 2 source-sigma-computing: type: "object" required: @@ -5301,6 +5629,48 @@ components: default: 100000 minimum: 15000 maximum: 200000 + source-shippo: + type: "object" + required: + - "shippo_token" + - "start_date" + - "sourceType" + properties: + shippo_token: + type: "string" + description: "The bearer token used for making requests" + title: "Shippo Token" + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "shippo" + const: "shippo" + enum: + - "shippo" + order: 0 + type: "string" + source-shippo-update: + type: "object" + required: + - "shippo_token" + - "start_date" + properties: + shippo_token: + type: "string" + description: "The bearer token used for making requests" + title: "Shippo Token" + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 source-coda: type: "object" required: @@ -6746,7 +7116,6 @@ components: - "client_id" - "client_secret" - "tenant_id" - - "application_id_uri" - "user_id" - "sourceType" properties: @@ -6768,15 +7137,9 @@ components: title: "Tenant Id" airbyte_secret: true x-speakeasy-param-sensitive: true - application_id_uri: - type: "string" - order: 3 - title: "Application Id URI" - airbyte_secret: true - x-speakeasy-param-sensitive: true user_id: type: "string" - order: 4 + order: 3 title: "User Id" airbyte_secret: true x-speakeasy-param-sensitive: true @@ -6793,7 +7156,6 @@ components: - "client_id" - "client_secret" - "tenant_id" - - "application_id_uri" - "user_id" properties: client_id: @@ -6811,16 +7173,62 @@ components: order: 2 title: "Tenant Id" airbyte_secret: true - application_id_uri: + user_id: type: "string" order: 3 - title: "Application Id URI" + title: "User Id" airbyte_secret: true - user_id: + source-less-annoying-crm: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: type: "string" - order: 4 - title: "User Id" + description: + "API key to use. Manage and create your API keys on the Programmer\ + \ API settings page at https://account.lessannoyingcrm.com/app/Settings/Api." 
+ name: "api_key" + order: 0 + title: "API Key" airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "less-annoying-crm" + const: "less-annoying-crm" + enum: + - "less-annoying-crm" + order: 0 + type: "string" + source-less-annoying-crm-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Manage and create your API keys on the Programmer\ + \ API settings page at https://account.lessannoyingcrm.com/app/Settings/Api." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-planhat: type: "object" required: @@ -6856,6 +7264,39 @@ components: order: 0 title: "API Token" airbyte_secret: true + source-encharge: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "encharge" + const: "encharge" + enum: + - "encharge" + order: 0 + type: "string" + source-encharge-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-shortio: title: "Shortio Spec" type: "object" @@ -6952,6 +7393,146 @@ components: title: "Rest API Key" airbyte_secret: true description: "Instatus REST API key" + source-flexmail: + type: "object" + required: + - "account_id" + - "personal_access_token" + - "sourceType" + properties: + account_id: + type: "string" + description: + "Your Flexmail account ID. You can find it in your Flexmail\ + \ account settings." + name: "account_id" + order: 0 + title: "Account ID" + personal_access_token: + type: "string" + description: + "A personal access token for API authentication. Manage your\ + \ tokens in Flexmail under Settings > API > Personal access tokens." + name: "personal_access_token" + order: 1 + title: "Personal Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "flexmail" + const: "flexmail" + enum: + - "flexmail" + order: 0 + type: "string" + source-flexmail-update: + type: "object" + required: + - "account_id" + - "personal_access_token" + properties: + account_id: + type: "string" + description: + "Your Flexmail account ID. You can find it in your Flexmail\ + \ account settings." + name: "account_id" + order: 0 + title: "Account ID" + personal_access_token: + type: "string" + description: + "A personal access token for API authentication. Manage your\ + \ tokens in Flexmail under Settings > API > Personal access tokens." 
+ name: "personal_access_token" + order: 1 + title: "Personal Access Token" + airbyte_secret: true + source-openfda: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "openfda" + const: "openfda" + enum: + - "openfda" + order: 0 + type: "string" + source-openfda-update: + type: "object" + required: [] + properties: {} + source-elasticemail: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + scope_type: + type: "string" + enum: + - "Personal" + - "Global" + order: 1 + title: "scope type" + from: + type: "string" + order: 2 + title: "From" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "elasticemail" + const: "elasticemail" + enum: + - "elasticemail" + order: 0 + type: "string" + source-elasticemail-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + scope_type: + type: "string" + enum: + - "Personal" + - "Global" + order: 1 + title: "scope type" + from: + type: "string" + order: 2 + title: "From" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 source-vwo: type: "object" required: @@ -7882,6 +8463,39 @@ components: \ data before this date will not be replicated." type: "string" format: "date-time" + source-paperform: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "paperform" + const: "paperform" + enum: + - "paperform" + order: 0 + type: "string" + source-paperform-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-microsoft-sharepoint: title: "Microsoft SharePoint Source Spec" description: @@ -9291,6 +9905,35 @@ components: >here. The token is case sensitive." 
airbyte_secret: true order: 4 + source-tinyemail: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "tinyemail" + const: "tinyemail" + enum: + - "tinyemail" + order: 0 + type: "string" + source-tinyemail-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true source-lob: type: "object" required: @@ -9448,6 +10091,117 @@ components: description: "Start date" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" order: 2 + source-apptivo: + type: "object" + required: + - "api_key" + - "access_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your Apptivo account under Business\ + \ Settings -> API Access." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_key: + type: "string" + order: 1 + title: "Access Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "apptivo" + const: "apptivo" + enum: + - "apptivo" + order: 0 + type: "string" + source-apptivo-update: + type: "object" + required: + - "api_key" + - "access_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your Apptivo account under Business\ + \ Settings -> API Access." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + access_key: + type: "string" + order: 1 + title: "Access Key" + airbyte_secret: true + source-zonka-feedback: + type: "object" + required: + - "datacenter" + - "auth_token" + - "sourceType" + properties: + datacenter: + type: "string" + description: + "The identifier for the data center, such as 'us1' or 'e' for\ + \ EU." + enum: + - "us1" + - "e" + name: "dc_id" + order: 0 + title: "Data Center ID" + auth_token: + type: "string" + description: + "Auth token to use. Generate it by navigating to Company Settings\ + \ > Developers > API in your Zonka Feedback account." + name: "auth_token" + order: 1 + title: "Auth Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zonka-feedback" + const: "zonka-feedback" + enum: + - "zonka-feedback" + order: 0 + type: "string" + source-zonka-feedback-update: + type: "object" + required: + - "datacenter" + - "auth_token" + properties: + datacenter: + type: "string" + description: + "The identifier for the data center, such as 'us1' or 'e' for\ + \ EU." + enum: + - "us1" + - "e" + name: "dc_id" + order: 0 + title: "Data Center ID" + auth_token: + type: "string" + description: + "Auth token to use. Generate it by navigating to Company Settings\ + \ > Developers > API in your Zonka Feedback account." + name: "auth_token" + order: 1 + title: "Auth Token" + airbyte_secret: true source-orb: type: "object" required: @@ -10338,6 +11092,199 @@ components: type: "string" order: 2 title: "workspace" + source-fillout: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Developer settings tab of your\ + \ Fillout account." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "fillout" + const: "fillout" + enum: + - "fillout" + order: 0 + type: "string" + source-fillout-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Developer settings tab of your\ + \ Fillout account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-inflowinventory: + type: "object" + required: + - "api_key" + - "companyid" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + companyid: + type: "string" + order: 1 + title: "CompanyID" + sourceType: + title: "inflowinventory" + const: "inflowinventory" + enum: + - "inflowinventory" + order: 0 + type: "string" + source-inflowinventory-update: + type: "object" + required: + - "api_key" + - "companyid" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + companyid: + type: "string" + order: 1 + title: "CompanyID" + source-clockodo: + type: "object" + required: + - "api_key" + - "email_address" + - "external_application" + - "years" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the 'Personal data' section of\ + \ your Clockodo account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + email_address: + type: "string" + description: + "Your Clockodo account email address. Find it in your Clockodo\ + \ account settings." + name: "email_address" + order: 1 + title: "Email Address" + external_application: + type: "string" + description: + "Identification of the calling application, including the email\ + \ address of a technical contact person. Format: [name of application\ + \ or company];[email address]." + name: "external_application" + order: 2 + title: "External Application Header" + default: "Airbyte" + years: + type: "array" + description: "2024, 2025" + title: "Years" + order: 3 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 + sourceType: + title: "clockodo" + const: "clockodo" + enum: + - "clockodo" + order: 0 + type: "string" + source-clockodo-update: + type: "object" + required: + - "api_key" + - "email_address" + - "external_application" + - "years" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the 'Personal data' section of\ + \ your Clockodo account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + email_address: + type: "string" + description: + "Your Clockodo account email address. Find it in your Clockodo\ + \ account settings." + name: "email_address" + order: 1 + title: "Email Address" + external_application: + type: "string" + description: + "Identification of the calling application, including the email\ + \ address of a technical contact person. 
Format: [name of application\ + \ or company];[email address]." + name: "external_application" + order: 2 + title: "External Application Header" + default: "Airbyte" + years: + type: "array" + description: "2024, 2025" + title: "Years" + order: 3 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 source-google-webfonts: type: "object" required: @@ -11481,6 +12428,103 @@ components: title: "API Key" airbyte_secret: true order: 0 + source-zoho-analytics-metadata-api: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "data_center" + - "org_id" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 2 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - "com.cn" + - "jp" + order: 3 + title: "Data Center" + default: "com" + org_id: + type: "number" + order: 4 + title: "Org Id" + sourceType: + title: "zoho-analytics-metadata-api" + const: "zoho-analytics-metadata-api" + enum: + - "zoho-analytics-metadata-api" + order: 0 + type: "string" + source-zoho-analytics-metadata-api-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "data_center" + - "org_id" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 2 + title: "OAuth Refresh Token" + airbyte_secret: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - "com.cn" + - "jp" + order: 3 + title: "Data Center" + default: "com" + org_id: + type: "number" + order: 4 + title: "Org Id" source-buzzsprout: type: "object" required: @@ -11536,6 +12580,43 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 2 + source-rocketlane: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the API section in Settings\ + \ of your Rocketlane account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "rocketlane" + const: "rocketlane" + enum: + - "rocketlane" + order: 0 + type: "string" + source-rocketlane-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the API section in Settings\ + \ of your Rocketlane account." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-youtube-analytics: title: "YouTube Analytics Spec" type: "object" @@ -11611,6 +12692,35 @@ components: "A refresh token generated using the above client ID and\ \ secret" airbyte_secret: true + source-systeme: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "systeme" + const: "systeme" + enum: + - "systeme" + order: 0 + type: "string" + source-systeme-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true source-zendesk-talk: type: "object" title: "Source Zendesk Talk Spec" @@ -11850,6 +12960,76 @@ components: \ example.thinkific.com, your subdomain is \"example\"." order: 1 title: "subdomain" + source-papersign: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "papersign" + const: "papersign" + enum: + - "papersign" + order: 0 + type: "string" + source-papersign-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-eventzilla: + type: "object" + required: + - "x-api-key" + - "sourceType" + properties: + x-api-key: + type: "string" + description: + "API key to use. Generate it by creating a new application\ + \ within your Eventzilla account settings under Settings > App Management." + name: "x-api-key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "eventzilla" + const: "eventzilla" + enum: + - "eventzilla" + order: 0 + type: "string" + source-eventzilla-update: + type: "object" + required: + - "x-api-key" + properties: + x-api-key: + type: "string" + description: + "API key to use. Generate it by creating a new application\ + \ within your Eventzilla account settings under Settings > App Management." 
+ name: "x-api-key" + order: 0 + title: "API Key" + airbyte_secret: true source-plausible: type: "object" required: @@ -12113,6 +13293,85 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-zoho-campaign: + type: "object" + required: + - "client_id_2" + - "client_secret_2" + - "client_refresh_token" + - "data_center" + - "sourceType" + properties: + client_id_2: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret_2: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - ".jp" + - ".com.cn" + order: 3 + title: "Data Center" + sourceType: + title: "zoho-campaign" + const: "zoho-campaign" + enum: + - "zoho-campaign" + order: 0 + type: "string" + source-zoho-campaign-update: + type: "object" + required: + - "client_id_2" + - "client_secret_2" + - "client_refresh_token" + - "data_center" + properties: + client_id_2: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret_2: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - ".jp" + - ".com.cn" + order: 3 + title: "Data Center" source-oura: type: "object" required: @@ -12172,6 +13431,46 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" description: "End datetime to sync until. Default is current UTC datetime." + source-cin7: + type: "object" + required: + - "accountid" + - "api_key" + - "sourceType" + properties: + accountid: + type: "string" + description: "The ID associated with your account." + order: 0 + title: "Account ID" + api_key: + type: "string" + description: "The API key associated with your account." + order: 1 + title: "API Key" + sourceType: + title: "cin7" + const: "cin7" + enum: + - "cin7" + order: 0 + type: "string" + source-cin7-update: + type: "object" + required: + - "accountid" + - "api_key" + properties: + accountid: + type: "string" + description: "The ID associated with your account." + order: 0 + title: "Account ID" + api_key: + type: "string" + description: "The API key associated with your account." + order: 1 + title: "API Key" source-looker: type: "object" required: @@ -12308,6 +13607,55 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-campayn: + type: "object" + required: + - "sub_domain" + - "api_key" + - "sourceType" + properties: + sub_domain: + type: "string" + name: "domain" + title: "Sub Domain" + order: 0 + api_key: + type: "string" + description: + "API key to use. Find it in your Campayn account settings.\ + \ Keep it secure as it grants access to your Campayn data." 
+ name: "api_key" + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "campayn" + const: "campayn" + enum: + - "campayn" + order: 0 + type: "string" + source-campayn-update: + type: "object" + required: + - "sub_domain" + - "api_key" + properties: + sub_domain: + type: "string" + name: "domain" + title: "Sub Domain" + order: 0 + api_key: + type: "string" + description: + "API key to use. Find it in your Campayn account settings.\ + \ Keep it secure as it grants access to your Campayn data." + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 1 source-google-tasks: type: "object" required: @@ -13725,6 +15073,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 7 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." + required: + - "delivery_type" host: title: "Host Address" description: "The server host address" @@ -13779,7 +15170,9 @@ components: description: "The Private key" multiline: true order: 4 + airbyte_secret: true type: "string" + x-speakeasy-param-sensitive: true required: - "private_key" - "auth_type" @@ -14287,6 +15680,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 7 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." 
+ required: + - "delivery_type" host: title: "Host Address" description: "The server host address" @@ -14340,6 +15776,7 @@ components: description: "The Private key" multiline: true order: 4 + airbyte_secret: true type: "string" required: - "private_key" @@ -15714,6 +17151,57 @@ components: default: false order: 4 type: "boolean" + source-oncehub: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your OnceHub account under the\ + \ API & Webhooks Integration page." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "oncehub" + const: "oncehub" + enum: + - "oncehub" + order: 0 + type: "string" + source-oncehub-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your OnceHub account under the\ + \ API & Webhooks Integration page." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-aws-cloudtrail: title: "Aws CloudTrail Spec" type: "object" @@ -15935,6 +17423,18 @@ components: \ for more info." default: false order: 6 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: "The number of worker threads to use for the sync." + order: 7 sourceType: title: "jira" const: "jira" @@ -16027,6 +17527,18 @@ components: \ for more info." default: false order: 6 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: "The number of worker threads to use for the sync." + order: 7 source-smartwaiver: type: "object" required: @@ -16088,6 +17600,61 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 2 + source-bunny-inc: + type: "object" + required: + - "subdomain" + - "apikey" + - "sourceType" + properties: + apikey: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "The subdomain specific to your Bunny account or service." + name: "subdomain" + order: 0 + title: "Subdomain" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "bunny-inc" + const: "bunny-inc" + enum: + - "bunny-inc" + order: 0 + type: "string" + source-bunny-inc-update: + type: "object" + required: + - "subdomain" + - "apikey" + properties: + apikey: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "The subdomain specific to your Bunny account or service." 
+ name: "subdomain" + order: 0 + title: "Subdomain" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-hubspot: title: "HubSpot Source Spec" type: "object" @@ -16395,6 +17962,86 @@ components: \ pagination will begin with that number to end of available comics" default: "2960" order: 0 + source-jobnimbus: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it by logging into your JobNimbus account,\ + \ navigating to settings, and creating a new API key under the API section." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "jobnimbus" + const: "jobnimbus" + enum: + - "jobnimbus" + order: 0 + type: "string" + source-jobnimbus-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it by logging into your JobNimbus account,\ + \ navigating to settings, and creating a new API key under the API section." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-marketstack: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "marketstack" + const: "marketstack" + enum: + - "marketstack" + order: 0 + type: "string" + source-marketstack-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 source-zenloop: title: "Zenloop Spec" type: "object" @@ -16566,6 +18213,23 @@ components: - "2.0" default: "2.0" order: 3 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 4 sourceType: title: "chargebee" const: "chargebee" @@ -16620,6 +18284,23 @@ components: - "2.0" default: "2.0" order: 3 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." 
+ order: 4 source-wrike: type: "object" required: @@ -17654,6 +19335,45 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-firehydrant: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Bot token to use for authenticating with the FireHydrant API.\ + \ You can find or create a bot token by logging into your organization\ + \ and visiting the Bot users page at https://app.firehydrant.io/organizations/bots." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "firehydrant" + const: "firehydrant" + enum: + - "firehydrant" + order: 0 + type: "string" + source-firehydrant-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Bot token to use for authenticating with the FireHydrant API.\ + \ You can find or create a bot token by logging into your organization\ + \ and visiting the Bot users page at https://app.firehydrant.io/organizations/bots." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-concord: type: "object" required: @@ -17705,6 +19425,64 @@ components: name: "organizationId" order: 1 title: "Environment" + source-e-conomic: + type: "object" + required: + - "app_secret_token" + - "agreement_grant_token" + - "sourceType" + properties: + app_secret_token: + type: "string" + description: + "Your private token that identifies your app. Find it in your\ + \ e-conomic account settings." + name: "app_secret_token" + order: 0 + title: "App Secret Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + agreement_grant_token: + type: "string" + description: + "Token that identifies the grant issued by an agreement, allowing\ + \ your app to access data. Obtain it from your e-conomic account settings." + name: "agreement_grant_token" + order: 1 + title: "Agreement Grant Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "e-conomic" + const: "e-conomic" + enum: + - "e-conomic" + order: 0 + type: "string" + source-e-conomic-update: + type: "object" + required: + - "app_secret_token" + - "agreement_grant_token" + properties: + app_secret_token: + type: "string" + description: + "Your private token that identifies your app. Find it in your\ + \ e-conomic account settings." + name: "app_secret_token" + order: 0 + title: "App Secret Token" + airbyte_secret: true + agreement_grant_token: + type: "string" + description: + "Token that identifies the grant issued by an agreement, allowing\ + \ your app to access data. Obtain it from your e-conomic account settings." + name: "agreement_grant_token" + order: 1 + title: "Agreement Grant Token" + airbyte_secret: true source-appfollow: type: "object" required: @@ -18011,6 +19789,55 @@ components: description: "API Secret" airbyte_secret: true order: 0 + source-gitbook: + type: "object" + required: + - "access_token" + - "space_id" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Personal access token for authenticating with the GitBook\ + \ API. You can view and manage your access tokens in the Developer settings\ + \ of your GitBook user account." 
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + space_id: + type: "string" + order: 1 + title: "Space Id" + sourceType: + title: "gitbook" + const: "gitbook" + enum: + - "gitbook" + order: 0 + type: "string" + source-gitbook-update: + type: "object" + required: + - "access_token" + - "space_id" + properties: + access_token: + type: "string" + description: + "Personal access token for authenticating with the GitBook\ + \ API. You can view and manage your access tokens in the Developer settings\ + \ of your GitBook user account." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + space_id: + type: "string" + order: 1 + title: "Space Id" source-miro: type: "object" required: @@ -18333,6 +20160,111 @@ components: \ issues fetching the stream, or checking the connection please set this\ \ to `False` instead." default: true + source-finnworlds: + type: "object" + required: + - "key" + - "start_date" + - "sourceType" + properties: + list: + type: "string" + description: "Choose isin, ticker, reg_lei or cik" + order: 0 + title: "List" + default: "ticker" + list_countries_for_bonds: + type: "string" + order: 1 + title: "List Countries for Bonds" + default: "country" + key: + type: "string" + order: 2 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + bond_type: + type: "array" + description: "For example 10y, 5y, 2y..." + order: 3 + title: "Bond Type" + countries: + type: "array" + description: "brazil, united states, italia, japan" + order: 4 + title: "Countries" + tickers: + type: "array" + description: "AAPL, T, MU, GOOG" + order: 5 + title: "Tickers" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + commodities: + type: "array" + description: "Options Available: beef, cheese, oil, ..." + order: 7 + title: "Commodities" + sourceType: + title: "finnworlds" + const: "finnworlds" + enum: + - "finnworlds" + order: 0 + type: "string" + source-finnworlds-update: + type: "object" + required: + - "key" + - "start_date" + properties: + list: + type: "string" + description: "Choose isin, ticker, reg_lei or cik" + order: 0 + title: "List" + default: "ticker" + list_countries_for_bonds: + type: "string" + order: 1 + title: "List Countries for Bonds" + default: "country" + key: + type: "string" + order: 2 + title: "API Key" + airbyte_secret: true + bond_type: + type: "array" + description: "For example 10y, 5y, 2y..." + order: 3 + title: "Bond Type" + countries: + type: "array" + description: "brazil, united states, italia, japan" + order: 4 + title: "Countries" + tickers: + type: "array" + description: "AAPL, T, MU, GOOG" + order: 5 + title: "Tickers" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + commodities: + type: "array" + description: "Options Available: beef, cheese, oil, ..." 
+ order: 7 + title: "Commodities" source-google-calendar: type: "object" required: @@ -18629,12 +20561,178 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-zoho-billing: + type: "object" + required: + - "region" + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + name: "region" + order: 0 + title: "Region" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zoho-billing" + const: "zoho-billing" + enum: + - "zoho-billing" + order: 0 + type: "string" + source-zoho-billing-update: + type: "object" + required: + - "region" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + name: "region" + order: 0 + title: "Region" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + source-akeneo: + type: "object" + required: + - "host" + - "api_username" + - "password" + - "client_id" + - "sourceType" + properties: + host: + type: "string" + description: "https://cb8715249e.trial.akeneo.cloud" + order: 0 + title: "Host" + api_username: + type: "string" + order: 1 + title: "API Username" + password: + type: "string" + order: 2 + title: "Password" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + order: 3 + title: "Client ID" + secret: + type: "string" + order: 4 + title: "Secret" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "akeneo" + const: "akeneo" + enum: + - "akeneo" + order: 0 + type: "string" + source-akeneo-update: + type: "object" + required: + - "host" + - "api_username" + - "password" + - "client_id" + properties: + host: + type: "string" + description: "https://cb8715249e.trial.akeneo.cloud" + order: 0 + title: "Host" + api_username: + type: "string" + order: 1 + title: "API Username" + password: + type: "string" + order: 2 + title: "Password" + airbyte_secret: true + client_id: + type: "string" + order: 3 + title: "Client ID" + secret: + type: "string" + order: 4 + title: "Secret" + always_show: true + airbyte_secret: true source-amazon-ads: - title: "Amazon Ads Spec" + title: "Source Amazon Ads" type: "object" properties: auth_type: title: "Auth Type" + default: "oauth2.0" const: "oauth2.0" order: 0 type: "string" @@ -18646,9 +20744,9 @@ components: "The client ID of your Amazon Ads developer application. See\ \ the docs for more information." 
+ airbyte_secret: true order: 1 type: "string" - airbyte_secret: true x-speakeasy-param-sensitive: true client_secret: title: "Client Secret" @@ -18674,25 +20772,176 @@ components: description: "Region to pull data from (EU/NA/FE). See docs for more details." + default: "NA" enum: - "NA" - "EU" - "FE" - type: "string" - default: "NA" order: 4 + type: "string" start_date: title: "Start Date" description: "The Start date for collecting reports, should not be more\ \ than 60 days in the past. In YYYY-MM-DD format" + examples: + - "2022-10-10" + - "2022-10-22" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 5 + type: "string" format: "date" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + default: [] + order: 8 + type: "array" + items: + title: "StateFilterEnum" + description: "An enumeration." + enum: + - "enabled" + - "paused" + - "archived" + type: "string" + uniqueItems: true + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + default: 3 + examples: + - 3 + - 10 + order: 9 + type: "integer" + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + default: [] + order: 10 + type: "array" + items: + title: "ReportRecordTypeEnum" + description: "An enumeration." + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "string" + uniqueItems: true + sourceType: + title: "amazon-ads" + const: "amazon-ads" + enum: + - "amazon-ads" + order: 0 + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + source-amazon-ads-update: + title: "Source Amazon Ads" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2.0" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + airbyte_secret: true + order: 1 + type: "string" + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. 
See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." + default: "NA" + enum: + - "NA" + - "EU" + - "FE" + order: 4 + type: "string" + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" examples: - "2022-10-10" - "2022-10-22" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" order: 5 type: "string" + format: "date" profiles: title: "Profile IDs" description: @@ -18722,170 +20971,29 @@ components: "Reflects the state of the Display, Product, and Brand Campaign\ \ streams as enabled, paused, or archived. If you do not populate this\ \ field, it will be ignored completely." - items: - type: "string" - enum: - - "enabled" - - "paused" - - "archived" - type: "array" - uniqueItems: true + default: [] order: 8 - look_back_window: - title: "Look Back Window" - description: - "The amount of days to go back in time to get the updated data\ - \ from Amazon Ads" - examples: - - 3 - - 10 - type: "integer" - default: 3 - order: 9 - report_record_types: - title: "Report Record Types" - description: - "Optional configuration which accepts an array of string of\ - \ record types. Leave blank for default behaviour to pull all report types.\ - \ Use this config option only if you want to pull specific report type(s).\ - \ See docs for more details" - items: - type: "string" - enum: - - "adGroups" - - "asins" - - "asins_keywords" - - "asins_targets" - - "campaigns" - - "keywords" - - "productAds" - - "targets" - type: "array" - uniqueItems: true - order: 10 - sourceType: - title: "amazon-ads" - const: "amazon-ads" - enum: - - "amazon-ads" - order: 0 - type: "string" - required: - - "client_id" - - "client_secret" - - "refresh_token" - - "sourceType" - source-amazon-ads-update: - title: "Amazon Ads Spec" - type: "object" - properties: - auth_type: - title: "Auth Type" - const: "oauth2.0" - order: 0 - type: "string" - enum: - - "oauth2.0" - client_id: - title: "Client ID" - description: - "The client ID of your Amazon Ads developer application. See\ - \ the docs for more information." - order: 1 - type: "string" - airbyte_secret: true - client_secret: - title: "Client Secret" - description: - "The client secret of your Amazon Ads developer application.\ - \ See the docs for more information." - airbyte_secret: true - order: 2 - type: "string" - refresh_token: - title: "Refresh Token" - description: - "Amazon Ads refresh token. See the docs for more information on how to obtain this token." - airbyte_secret: true - order: 3 - type: "string" - region: - title: "Region" - description: - "Region to pull data from (EU/NA/FE). See docs for more details." - enum: - - "NA" - - "EU" - - "FE" - type: "string" - default: "NA" - order: 4 - start_date: - title: "Start Date" - description: - "The Start date for collecting reports, should not be more\ - \ than 60 days in the past. In YYYY-MM-DD format" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - format: "date" - examples: - - "2022-10-10" - - "2022-10-22" - order: 5 - type: "string" - profiles: - title: "Profile IDs" - description: - "Profile IDs you want to fetch data for. The Amazon Ads source\ - \ connector supports only profiles with seller and vendor type, profiles\ - \ with agency type will be ignored. See docs for more details. 
Note: If Marketplace IDs are also selected,\ - \ profiles will be selected if they match the Profile ID OR the Marketplace\ - \ ID." - order: 6 type: "array" items: - type: "integer" - marketplace_ids: - title: "Marketplace IDs" - description: - "Marketplace IDs you want to fetch data for. Note: If Profile\ - \ IDs are also selected, profiles will be selected if they match the Profile\ - \ ID OR the Marketplace ID." - order: 7 - type: "array" - items: - type: "string" - state_filter: - title: "State Filter" - description: - "Reflects the state of the Display, Product, and Brand Campaign\ - \ streams as enabled, paused, or archived. If you do not populate this\ - \ field, it will be ignored completely." - items: - type: "string" + title: "StateFilterEnum" + description: "An enumeration." enum: - "enabled" - "paused" - "archived" - type: "array" + type: "string" uniqueItems: true - order: 8 look_back_window: title: "Look Back Window" description: "The amount of days to go back in time to get the updated data\ \ from Amazon Ads" + default: 3 examples: - 3 - 10 - type: "integer" - default: 3 order: 9 + type: "integer" report_record_types: title: "Report Record Types" description: @@ -18894,8 +21002,12 @@ components: \ Use this config option only if you want to pull specific report type(s).\ \ See docs for more details" + default: [] + order: 10 + type: "array" items: - type: "string" + title: "ReportRecordTypeEnum" + description: "An enumeration." enum: - "adGroups" - "asins" @@ -18905,9 +21017,8 @@ components: - "keywords" - "productAds" - "targets" - type: "array" + type: "string" uniqueItems: true - order: 10 required: - "client_id" - "client_secret" @@ -19430,6 +21541,133 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 3 + source-newsdata-io: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + search_query: + type: "string" + description: + "Search news articles for specific keywords or phrases present\ + \ in the news title, content, URL, meta keywords and meta description." + title: "Search Query" + order: 1 + countries: + type: "array" + description: + "Search the news articles from a specific country. You can\ + \ add up to 5 countries in a single query. Example: au, jp, br" + title: "Countries" + order: 2 + categories: + type: "array" + description: + "Search the news articles for a specific category. You can\ + \ add up to 5 categories in a single query." + title: "Categories" + order: 3 + languages: + type: "array" + description: + "Search the news articles for a specific language. You can\ + \ add up to 5 languages in a single query. " + title: "Languages" + order: 4 + domains: + type: "array" + description: + "Search the news articles for specific domains or news sources.\ + \ You can add up to 5 domains in a single query. " + title: "Domains" + order: 5 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + end_date: + type: "string" + description: "Choose an end date. 
Now UTC is default value" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 7 + sourceType: + title: "newsdata-io" + const: "newsdata-io" + enum: + - "newsdata-io" + order: 0 + type: "string" + source-newsdata-io-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + search_query: + type: "string" + description: + "Search news articles for specific keywords or phrases present\ + \ in the news title, content, URL, meta keywords and meta description." + title: "Search Query" + order: 1 + countries: + type: "array" + description: + "Search the news articles from a specific country. You can\ + \ add up to 5 countries in a single query. Example: au, jp, br" + title: "Countries" + order: 2 + categories: + type: "array" + description: + "Search the news articles for a specific category. You can\ + \ add up to 5 categories in a single query." + title: "Categories" + order: 3 + languages: + type: "array" + description: + "Search the news articles for a specific language. You can\ + \ add up to 5 languages in a single query. " + title: "Languages" + order: 4 + domains: + type: "array" + description: + "Search the news articles for specific domains or news sources.\ + \ You can add up to 5 domains in a single query. " + title: "Domains" + order: 5 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + end_date: + type: "string" + description: "Choose an end date. Now UTC is default value" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 7 source-clazar: type: "object" required: @@ -21599,6 +23837,45 @@ components: >here." airbyte_secret: true order: 0 + source-repairshopr: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + title: "subdomain" + order: 1 + sourceType: + title: "repairshopr" + const: "repairshopr" + enum: + - "repairshopr" + order: 0 + type: "string" + source-repairshopr-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + title: "subdomain" + order: 1 source-hubplanner: title: "Hubplanner Spec" type: "object" @@ -23110,6 +25387,53 @@ components: type: "string" airbyte_secret: true order: 4 + source-lightspeed-retail: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key or access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "The subdomain for the retailer, e.g., 'example' in 'example.retail.lightspeed.app'." 
+ name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "lightspeed-retail" + const: "lightspeed-retail" + enum: + - "lightspeed-retail" + order: 0 + type: "string" + source-lightspeed-retail-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: "API key or access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "The subdomain for the retailer, e.g., 'example' in 'example.retail.lightspeed.app'." + name: "subdomain" + order: 1 + title: "Subdomain" source-postmarkapp: type: "object" required: @@ -23156,6 +25480,107 @@ components: order: 1 title: "X-Postmark-Account-Token" airbyte_secret: true + source-finnhub: + type: "object" + required: + - "api_key" + - "symbols" + - "market_news_category" + - "exchange" + - "start_date_2" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + symbols: + type: "array" + name: "company_symbol" + order: 1 + title: "Companies" + market_news_category: + type: "string" + description: + "This parameter can be 1 of the following values general, forex,\ + \ crypto, merger." + title: "Market News Category" + default: "general" + enum: + - "general" + - "forex" + - "crypto" + - "merger" + order: 2 + exchange: + type: "string" + description: "More info: https://finnhub.io/docs/api/stock-symbols" + title: "Exchange" + default: "US" + order: 3 + start_date_2: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 + sourceType: + title: "finnhub" + const: "finnhub" + enum: + - "finnhub" + order: 0 + type: "string" + source-finnhub-update: + type: "object" + required: + - "api_key" + - "symbols" + - "market_news_category" + - "exchange" + - "start_date_2" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + symbols: + type: "array" + name: "company_symbol" + order: 1 + title: "Companies" + market_news_category: + type: "string" + description: + "This parameter can be 1 of the following values general, forex,\ + \ crypto, merger." + title: "Market News Category" + default: "general" + enum: + - "general" + - "forex" + - "crypto" + - "merger" + order: 2 + exchange: + type: "string" + description: "More info: https://finnhub.io/docs/api/stock-symbols" + title: "Exchange" + default: "US" + order: 3 + start_date_2: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 source-float: type: "object" required: @@ -23266,6 +25691,66 @@ components: minimum: 1 default: 1000 order: 0 + source-onfleet: + type: "object" + required: + - "api_key" + - "password" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use for authenticating requests. You can create\ + \ and manage your API keys in the API section of the Onfleet dashboard." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + password: + type: "string" + description: + "Placeholder for basic HTTP auth password - should be set to\ + \ empty string" + name: "password" + order: 1 + title: "Placeholder Password" + default: "x" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "onfleet" + const: "onfleet" + enum: + - "onfleet" + order: 0 + type: "string" + source-onfleet-update: + type: "object" + required: + - "api_key" + - "password" + properties: + api_key: + type: "string" + description: + "API key to use for authenticating requests. You can create\ + \ and manage your API keys in the API section of the Onfleet dashboard." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + password: + type: "string" + description: + "Placeholder for basic HTTP auth password - should be set to\ + \ empty string" + name: "password" + order: 1 + title: "Placeholder Password" + default: "x" + airbyte_secret: true source-gorgias: type: "object" required: @@ -23806,7 +26291,118 @@ components: type: "string" airbyte_secret: true order: 1 - x-speakeasy-param-sensitive: true + x-speakeasy-param-sensitive: true + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." 
+ type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + sourceType: + title: "sftp" + const: "sftp" + enum: + - "sftp" + order: 0 + type: "string" + source-sftp-update: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: "Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 - title: "SSH Key Authentication" required: - "auth_method" @@ -23828,7 +26424,6 @@ components: airbyte_secret: true multiline: true order: 1 - x-speakeasy-param-sensitive: true file_types: title: "File types" description: @@ -23858,116 +26453,75 @@ components: examples: - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" order: 6 + source-agilecrm: + type: "object" + required: + - "email" + - "domain" + - "api_key" + - "sourceType" + properties: + email: + type: "string" + description: + "Your Agile CRM account email address. This is used as the\ + \ username for authentication." + name: "email" + order: 0 + title: "Email Address" + domain: + type: "string" + description: "The specific subdomain for your Agile CRM account" + name: "domain" + order: 1 + title: "Domain" + api_key: + type: "string" + description: + "API key to use. Find it at Admin Settings -> API & Analytics\ + \ -> API Key in your Agile CRM account." + name: "api_key" + order: 2 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true sourceType: - title: "sftp" - const: "sftp" + title: "agilecrm" + const: "agilecrm" enum: - - "sftp" + - "agilecrm" order: 0 type: "string" - source-sftp-update: - title: "SFTP Source Spec" + source-agilecrm-update: type: "object" required: - - "user" - - "host" - - "port" + - "email" + - "domain" + - "api_key" properties: - user: - title: "User Name" - description: "The server user" + email: type: "string" + description: + "Your Agile CRM account email address. This is used as the\ + \ username for authentication." 
+ name: "email" order: 0 - host: - title: "Host Address" - description: "The server host address" + title: "Email Address" + domain: type: "string" - examples: - - "www.host.com" - - "192.0.2.1" + description: "The specific subdomain for your Agile CRM account" + name: "domain" order: 1 - port: - title: "Port" - description: "The server port" - type: "integer" - default: 22 - examples: - - "22" - order: 2 - credentials: - type: "object" - title: "Authentication" - description: "The server authentication method" - order: 3 - oneOf: - - title: "Password Authentication" - required: - - "auth_method" - - "auth_user_password" - properties: - auth_method: - description: "Connect through password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - enum: - - "SSH_PASSWORD_AUTH" - auth_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 1 - - title: "SSH Key Authentication" - required: - - "auth_method" - - "auth_ssh_key" - properties: - auth_method: - description: "Connect through ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - enum: - - "SSH_KEY_AUTH" - auth_ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 1 - file_types: - title: "File types" - description: - "Coma separated file types. Currently only 'csv' and 'json'\ - \ types are supported." - type: "string" - default: "csv,json" - order: 4 - examples: - - "csv,json" - - "csv" - folder_path: - title: "Folder Path" - description: "The directory to search files for sync" + title: "Domain" + api_key: type: "string" - default: "" - examples: - - "/logs/2022" - order: 5 - file_pattern: - title: "File Pattern" description: - "The regular expression to specify files for sync in a chosen\ - \ Folder Path" - type: "string" - default: "" - examples: - - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" - order: 6 + "API key to use. Find it at Admin Settings -> API & Analytics\ + \ -> API Key in your Agile CRM account." + name: "api_key" + order: 2 + title: "API Key" + airbyte_secret: true source-google-drive: title: "Google Drive Source Spec" description: @@ -24987,6 +27541,45 @@ components: >here." 
airbyte_secret: true order: 2 + source-pabbly-subscriptions-billing: + type: "object" + required: + - "username" + - "sourceType" + properties: + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + username: + type: "string" + order: 0 + title: "Username" + sourceType: + title: "pabbly-subscriptions-billing" + const: "pabbly-subscriptions-billing" + enum: + - "pabbly-subscriptions-billing" + order: 0 + type: "string" + source-pabbly-subscriptions-billing-update: + type: "object" + required: + - "username" + properties: + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + username: + type: "string" + order: 0 + title: "Username" source-chameleon: type: "object" required: @@ -26343,8 +28936,61 @@ components: title: "Refresh token" airbyte_secret: true order: 4 + source-taboola: + type: "object" + required: + - "client_id" + - "client_secret" + - "account_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + description: "The ID associated with your taboola account" + order: 2 + title: "Account ID" + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "taboola" + const: "taboola" + enum: + - "taboola" + order: 0 + type: "string" + source-taboola-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "account_id" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + account_id: + type: "string" + description: "The ID associated with your taboola account" + order: 2 + title: "Account ID" + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true source-qualaroo: - title: "Qualaroo Spec" type: "object" required: - "token" @@ -26354,39 +29000,43 @@ components: properties: token: type: "string" - title: "API token" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API token" airbyte_secret: true + order: 0 x-speakeasy-param-sensitive: true key: type: "string" - title: "API key" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API key" airbyte_secret: true + order: 1 x-speakeasy-param-sensitive: true start_date: type: "string" - title: "Start Date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ \ data before this date will not be replicated." + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" examples: - "2021-03-01T00:00:00.000Z" + order: 2 survey_ids: type: "array" - items: - type: "string" - pattern: "^[0-9]{1,8}$" - title: "Qualaroo survey IDs" description: "IDs of the surveys from which you'd like to replicate data.\ \ If left empty, data from all surveys to which you have access will be\ \ replicated." 
+ items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + order: 3 sourceType: title: "qualaroo" const: "qualaroo" @@ -26395,7 +29045,6 @@ components: order: 0 type: "string" source-qualaroo-update: - title: "Qualaroo Spec" type: "object" required: - "token" @@ -26404,37 +29053,41 @@ components: properties: token: type: "string" - title: "API token" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API token" airbyte_secret: true + order: 0 key: type: "string" - title: "API key" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API key" airbyte_secret: true + order: 1 start_date: type: "string" - title: "Start Date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ \ data before this date will not be replicated." + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" examples: - "2021-03-01T00:00:00.000Z" + order: 2 survey_ids: type: "array" - items: - type: "string" - pattern: "^[0-9]{1,8}$" - title: "Qualaroo survey IDs" description: "IDs of the surveys from which you'd like to replicate data.\ \ If left empty, data from all surveys to which you have access will be\ \ replicated." + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + order: 3 source-front: type: "object" required: @@ -26837,6 +29490,283 @@ components: additionalProperties: true order: 3 title: "Authentication mechanism" + source-sendowl: + type: "object" + required: + - "username" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + description: "Enter you API Key" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API secret" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "sendowl" + const: "sendowl" + enum: + - "sendowl" + order: 0 + type: "string" + source-sendowl-update: + type: "object" + required: + - "username" + - "start_date" + properties: + username: + type: "string" + description: "Enter you API Key" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API secret" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-getgist: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Integration Settings on your\ + \ Gist dashboard at https://app.getgist.com/projects/_/settings/api-key." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "getgist" + const: "getgist" + enum: + - "getgist" + order: 0 + type: "string" + source-getgist-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Integration Settings on your\ + \ Gist dashboard at https://app.getgist.com/projects/_/settings/api-key." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-mailtrap: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://mailtrap.io/account" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "mailtrap" + const: "mailtrap" + enum: + - "mailtrap" + order: 0 + type: "string" + source-mailtrap-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://mailtrap.io/account" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-cloudbeds: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "cloudbeds" + const: "cloudbeds" + enum: + - "cloudbeds" + order: 0 + type: "string" + source-cloudbeds-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + source-freshbooks: + type: "object" + required: + - "client_id" + - "client_secret" + - "redirect_uri" + - "account_id" + - "client_refresh_token" + - "business_uuid" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + redirect_uri: + type: "string" + order: 2 + title: "Redirect Uri" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + order: 3 + title: "Account Id" + client_refresh_token: + type: "string" + order: 4 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + oauth_access_token: + type: "string" + description: + "The current access token. This field might be overridden by\ + \ the connector based on the token refresh endpoint response." + order: 5 + title: "Access token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + oauth_token_expiry_date: + type: "string" + description: + "The date the current access token expires in. This field might\ + \ be overridden by the connector based on the token refresh endpoint response." + order: 6 + title: "Token expiry date" + format: "date-time" + business_uuid: + type: "string" + order: 7 + title: "Business uuid" + sourceType: + title: "freshbooks" + const: "freshbooks" + enum: + - "freshbooks" + order: 0 + type: "string" + source-freshbooks-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "redirect_uri" + - "account_id" + - "client_refresh_token" + - "business_uuid" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + redirect_uri: + type: "string" + order: 2 + title: "Redirect Uri" + airbyte_secret: true + account_id: + type: "string" + order: 3 + title: "Account Id" + client_refresh_token: + type: "string" + order: 4 + title: "Refresh token" + airbyte_secret: true + oauth_access_token: + type: "string" + description: + "The current access token. This field might be overridden by\ + \ the connector based on the token refresh endpoint response." 
+ order: 5 + title: "Access token" + airbyte_secret: true + oauth_token_expiry_date: + type: "string" + description: + "The date the current access token expires in. This field might\ + \ be overridden by the connector based on the token refresh endpoint response." + order: 6 + title: "Token expiry date" + format: "date-time" + business_uuid: + type: "string" + order: 7 + title: "Business uuid" source-just-sift: type: "object" required: @@ -27414,31 +30344,31 @@ components: properties: api_key: type: "string" - title: "API Key" - airbyte_secret: true description: "Recurly API Key. See the docs for more information on how to generate this key." order: 0 + title: "API Key" + airbyte_secret: true x-speakeasy-param-sensitive: true begin_time: type: "string" description: "ISO8601 timestamp from which the replication from Recurly\ \ API will start from." + order: 1 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 1 end_time: type: "string" description: "ISO8601 timestamp to which the replication from Recurly API\ \ will stop. Records after that date won't be imported." + order: 2 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 2 sourceType: title: "recurly" const: "recurly" @@ -27453,30 +30383,30 @@ components: properties: api_key: type: "string" - title: "API Key" - airbyte_secret: true description: "Recurly API Key. See the docs for more information on how to generate this key." order: 0 + title: "API Key" + airbyte_secret: true begin_time: type: "string" description: "ISO8601 timestamp from which the replication from Recurly\ \ API will start from." + order: 1 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 1 end_time: type: "string" description: "ISO8601 timestamp to which the replication from Recurly API\ \ will stop. Records after that date won't be imported." + order: 2 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 2 source-pennylane: type: "object" required: @@ -27644,6 +30574,7 @@ components: title: "Zendesk Chat Spec" type: "object" required: + - "subdomain" - "start_date" - "sourceType" properties: @@ -27661,8 +30592,12 @@ components: type: "string" title: "Subdomain" description: - "Required if you access Zendesk Chat from a Zendesk Support\ - \ subdomain." + "The unique subdomain of your Zendesk account (without https://).\ + \ See the Zendesk docs to find your subdomain" + pattern: "^(?!https://)" + examples: + - "myzendeskchat" default: "" credentials: title: "Authorization Method" @@ -27734,6 +30669,7 @@ components: title: "Zendesk Chat Spec" type: "object" required: + - "subdomain" - "start_date" properties: start_date: @@ -27750,8 +30686,12 @@ components: type: "string" title: "Subdomain" description: - "Required if you access Zendesk Chat from a Zendesk Support\ - \ subdomain." 
+ "The unique subdomain of your Zendesk account (without https://).\ + \ See the Zendesk docs to find your subdomain" + pattern: "^(?!https://)" + examples: + - "myzendeskchat" default: "" credentials: title: "Authorization Method" @@ -28249,6 +31189,147 @@ components: order: 2 title: "API Endpoint Prefix" default: "api" + source-nocrm: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the admin section of your\ + \ noCRM.io account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: + "The subdomain specific to your noCRM.io account, e.g., 'yourcompany'\ + \ in 'yourcompany.nocrm.io'." + name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "nocrm" + const: "nocrm" + enum: + - "nocrm" + order: 0 + type: "string" + source-nocrm-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the admin section of your\ + \ noCRM.io account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: + "The subdomain specific to your noCRM.io account, e.g., 'yourcompany'\ + \ in 'yourcompany.nocrm.io'." + name: "subdomain" + order: 1 + title: "Subdomain" + source-openaq: + type: "object" + required: + - "api_key" + - "country_ids" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + country_ids: + type: "array" + description: + "The list of IDs of countries (comma separated) you need the\ + \ data for, check more: https://docs.openaq.org/resources/countries" + order: 1 + title: "Countries" + sourceType: + title: "openaq" + const: "openaq" + enum: + - "openaq" + order: 0 + type: "string" + source-openaq-update: + type: "object" + required: + - "api_key" + - "country_ids" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + country_ids: + type: "array" + description: + "The list of IDs of countries (comma separated) you need the\ + \ data for, check more: https://docs.openaq.org/resources/countries" + order: 1 + title: "Countries" + source-deputy: + type: "object" + required: + - "base_url" + - "api_key" + - "sourceType" + properties: + base_url: + type: "string" + description: "The base url for your deputy account to make API requests" + order: 0 + title: "Base URL" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "deputy" + const: "deputy" + enum: + - "deputy" + order: 0 + type: "string" + source-deputy-update: + type: "object" + required: + - "base_url" + - "api_key" + properties: + base_url: + type: "string" + description: "The base url for your deputy account to make API requests" + order: 0 + title: "Base URL" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true source-workflowmax: type: "object" required: @@ -28374,6 +31455,81 @@ components: always_show: true airbyte_secret: true order: 3 + source-stockdata: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + symbols: + type: "array" + order: 
1 + title: "Symbols" + industries: + type: "array" + description: + "Specify the industries of entities which have been identified\ + \ within the article." + order: 2 + title: "Industries" + filter_entities: + type: "boolean" + order: 3 + title: "Entities" + default: false + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "stockdata" + const: "stockdata" + enum: + - "stockdata" + order: 0 + type: "string" + source-stockdata-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + symbols: + type: "array" + order: 1 + title: "Symbols" + industries: + type: "array" + description: + "Specify the industries of entities which have been identified\ + \ within the article." + order: 2 + title: "Industries" + filter_entities: + type: "boolean" + order: 3 + title: "Entities" + default: false + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-dockerhub: type: "object" required: @@ -31203,6 +34359,43 @@ components: order: 0 title: "Bearer Token" airbyte_secret: true + source-ubidots: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use for authentication. Obtain it from your Ubidots\ + \ account." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "ubidots" + const: "ubidots" + enum: + - "ubidots" + order: 0 + type: "string" + source-ubidots-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use for authentication. Obtain it from your Ubidots\ + \ account." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-height: type: "object" required: @@ -31340,6 +34533,43 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-ruddr: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it in the API Keys section of your\ + \ Ruddr workspace settings." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "ruddr" + const: "ruddr" + enum: + - "ruddr" + order: 0 + type: "string" + source-ruddr-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it in the API Keys section of your\ + \ Ruddr workspace settings." 
+ name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-polygon-stock-api: type: "object" required: @@ -32007,13 +35237,14 @@ components: - id: "advanced" title: "Advanced" source-retently: - title: "Retently Api Spec" type: "object" + required: + - "sourceType" properties: credentials: - title: "Authentication Mechanism" - description: "Choose how to authenticate to Retently" type: "object" + description: "Choose how to authenticate to Retently" + title: "Authentication Mechanism" oneOf: - type: "object" title: "Authenticate via Retently (OAuth)" @@ -32030,21 +35261,21 @@ components: enum: - "Client" client_id: - title: "Client ID" type: "string" description: "The Client ID of your Retently developer application." + title: "Client ID" client_secret: - title: "Client Secret" type: "string" description: "The Client Secret of your Retently developer application." + title: "Client Secret" airbyte_secret: true x-speakeasy-param-sensitive: true refresh_token: - title: "Refresh Token" type: "string" description: "Retently Refresh Token which can be used to fetch new\ \ Bearer Tokens when the current one expires." + title: "Refresh Token" airbyte_secret: true x-speakeasy-param-sensitive: true - type: "object" @@ -32060,13 +35291,14 @@ components: enum: - "Token" api_key: - title: "API Token" + type: "string" description: "Retently API Token. See the docs for more information on how to obtain this key." - type: "string" + title: "API Token" airbyte_secret: true x-speakeasy-param-sensitive: true + order: 0 sourceType: title: "retently" const: "retently" @@ -32075,13 +35307,13 @@ components: order: 0 type: "string" source-retently-update: - title: "Retently Api Spec" type: "object" + required: [] properties: credentials: - title: "Authentication Mechanism" - description: "Choose how to authenticate to Retently" type: "object" + description: "Choose how to authenticate to Retently" + title: "Authentication Mechanism" oneOf: - type: "object" title: "Authenticate via Retently (OAuth)" @@ -32098,20 +35330,20 @@ components: enum: - "Client" client_id: - title: "Client ID" type: "string" description: "The Client ID of your Retently developer application." + title: "Client ID" client_secret: - title: "Client Secret" type: "string" description: "The Client Secret of your Retently developer application." + title: "Client Secret" airbyte_secret: true refresh_token: - title: "Refresh Token" type: "string" description: "Retently Refresh Token which can be used to fetch new\ \ Bearer Tokens when the current one expires." + title: "Refresh Token" airbyte_secret: true - type: "object" title: "Authenticate with API Token" @@ -32126,12 +35358,13 @@ components: enum: - "Token" api_key: - title: "API Token" + type: "string" description: "Retently API Token. See the docs for more information on how to obtain this key." 
- type: "string" + title: "API Token" airbyte_secret: true + order: 0 source-jotform: type: "object" required: @@ -32346,6 +35579,45 @@ components: type: "string" required: - "access_token" + source-nutshell: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "API Token" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "nutshell" + const: "nutshell" + enum: + - "nutshell" + order: 0 + type: "string" + source-nutshell-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "API Token" + always_show: true + airbyte_secret: true source-dbt: type: "object" required: @@ -32871,6 +36143,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 6 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." + required: + - "delivery_type" bucket: title: "Bucket" description: "Name of the S3 bucket where the file(s) exist." @@ -33345,6 +36660,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 6 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." 
+ required: + - "delivery_type" bucket: title: "Bucket" description: "Name of the S3 bucket where the file(s) exist." @@ -33520,32 +36878,88 @@ components: examples: - "2020-10-15T00:00:00Z" order: 3 + source-box: + type: "object" + required: + - "client_id" + - "client_secret" + - "user" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + user: + type: "number" + order: 2 + title: "User" + sourceType: + title: "box" + const: "box" + enum: + - "box" + order: 0 + type: "string" + source-box-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "user" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + user: + type: "number" + order: 2 + title: "User" source-zendesk-sunshine: type: "object" required: - - "start_date" - "subdomain" + - "start_date" - "sourceType" properties: subdomain: type: "string" + description: "The subdomain for your Zendesk Account." order: 0 title: "Subdomain" - description: "The subdomain for your Zendesk Account." start_date: type: "string" - title: "Start date" - format: "date-time" description: "The date from which you'd like to replicate data for Zendesk\ \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + title: "Start date" + format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-01-01T00:00:00Z" order: 1 credentials: - title: "Authorization Method" type: "object" + title: "Authorization Method" oneOf: - type: "object" title: "OAuth2.0" @@ -33564,20 +36978,20 @@ components: order: 0 client_id: type: "string" - title: "Client ID" description: "The Client ID of your OAuth application." + title: "Client ID" airbyte_secret: true x-speakeasy-param-sensitive: true client_secret: type: "string" - title: "Client Secret" description: "The Client Secret of your OAuth application." + title: "Client Secret" airbyte_secret: true x-speakeasy-param-sensitive: true access_token: type: "string" - title: "Access Token" description: "Long-term access Token for making authenticated requests." + title: "Access Token" airbyte_secret: true x-speakeasy-param-sensitive: true - type: "object" @@ -33596,16 +37010,17 @@ components: order: 1 api_token: type: "string" - title: "API Token" description: "API Token. See the docs for information on how to generate this key." + title: "API Token" airbyte_secret: true x-speakeasy-param-sensitive: true email: type: "string" - title: "Email" description: "The user email for your Zendesk account" + title: "Email" + order: 2 sourceType: title: "zendesk-sunshine" const: "zendesk-sunshine" @@ -33616,28 +37031,28 @@ components: source-zendesk-sunshine-update: type: "object" required: - - "start_date" - "subdomain" + - "start_date" properties: subdomain: type: "string" + description: "The subdomain for your Zendesk Account." order: 0 title: "Subdomain" - description: "The subdomain for your Zendesk Account." start_date: type: "string" - title: "Start date" - format: "date-time" description: "The date from which you'd like to replicate data for Zendesk\ \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." 
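# Editor's note (illustrative sketch; not part of this diff): the new
# delivery_method block added to the S3 source spec above is a oneOf keyed on a
# single required field, delivery_type ("use_records_transfer" by default,
# "use_file_transfer" to copy raw files without parsing). A configuration that
# opts into raw file copy would carry the fragment below; the bucket value is a
# hypothetical placeholder and the other required S3 fields are omitted.
s3_raw_file_copy_example:
  bucket: "my-source-bucket"
  delivery_method:
    delivery_type: "use_file_transfer"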
+ title: "Start date" + format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-01-01T00:00:00Z" order: 1 credentials: - title: "Authorization Method" type: "object" + title: "Authorization Method" oneOf: - type: "object" title: "OAuth2.0" @@ -33656,18 +37071,18 @@ components: order: 0 client_id: type: "string" - title: "Client ID" description: "The Client ID of your OAuth application." + title: "Client ID" airbyte_secret: true client_secret: type: "string" - title: "Client Secret" description: "The Client Secret of your OAuth application." + title: "Client Secret" airbyte_secret: true access_token: type: "string" - title: "Access Token" description: "Long-term access Token for making authenticated requests." + title: "Access Token" airbyte_secret: true - type: "object" title: "API Token" @@ -33685,15 +37100,16 @@ components: order: 1 api_token: type: "string" - title: "API Token" description: "API Token. See the docs for information on how to generate this key." + title: "API Token" airbyte_secret: true email: type: "string" - title: "Email" description: "The user email for your Zendesk account" + title: "Email" + order: 2 source-mention: type: "object" required: @@ -34537,6 +37953,55 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-track-pms: + type: "object" + required: + - "customer_domain" + - "api_key" + - "sourceType" + properties: + customer_domain: + type: "string" + order: 0 + title: "Customer Domain" + api_key: + type: "string" + order: 1 + title: "API Key" + api_secret: + type: "string" + order: 2 + title: "API Secret" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "track-pms" + const: "track-pms" + enum: + - "track-pms" + order: 0 + type: "string" + source-track-pms-update: + type: "object" + required: + - "customer_domain" + - "api_key" + properties: + customer_domain: + type: "string" + order: 0 + title: "Customer Domain" + api_key: + type: "string" + order: 1 + title: "API Key" + api_secret: + type: "string" + order: 2 + title: "API Secret" + always_show: true + airbyte_secret: true source-whisky-hunter: type: "object" required: @@ -34803,6 +38268,45 @@ components: "The date from which you'd like to replicate data for Salesloft\ \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ \ date will be replicated." 
+ source-youtube-data: + type: "object" + required: + - "api_key" + - "channel_ids" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + channel_ids: + type: "array" + order: 1 + title: "Channel IDs" + sourceType: + title: "youtube-data" + const: "youtube-data" + enum: + - "youtube-data" + order: 0 + type: "string" + source-youtube-data-update: + type: "object" + required: + - "api_key" + - "channel_ids" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + channel_ids: + type: "array" + order: 1 + title: "Channel IDs" source-yandex-metrica: title: "Yandex Metrica Spec" type: "object" @@ -35581,6 +39085,94 @@ components: - "2023-08-05T00:43:59.244Z" default: "2023-08-05T00:43:59.244Z" airbyte_secret: false + source-cal-com: + type: "object" + required: + - "orgId" + - "api_key" + - "sourceType" + properties: + orgId: + type: "string" + name: "Organization ID" + order: 0 + title: "orgId" + api_key: + type: "string" + description: "API key to use. Find it at https://cal.com/account" + name: "api_key" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "cal-com" + const: "cal-com" + enum: + - "cal-com" + order: 0 + type: "string" + source-cal-com-update: + type: "object" + required: + - "orgId" + - "api_key" + properties: + orgId: + type: "string" + name: "Organization ID" + order: 0 + title: "orgId" + api_key: + type: "string" + description: "API key to use. Find it at https://cal.com/account" + name: "api_key" + order: 1 + title: "API Key" + airbyte_secret: true + source-oveit: + type: "object" + required: + - "email" + - "password" + - "sourceType" + properties: + email: + type: "string" + description: "Oveit's login Email" + order: 0 + title: "Email" + password: + type: "string" + description: "Oveit's login Password" + order: 1 + title: "Password" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "oveit" + const: "oveit" + enum: + - "oveit" + order: 0 + type: "string" + source-oveit-update: + type: "object" + required: + - "email" + - "password" + properties: + email: + type: "string" + description: "Oveit's login Email" + order: 0 + title: "Email" + password: + type: "string" + description: "Oveit's login Password" + order: 1 + title: "Password" + airbyte_secret: true source-clockify: type: "object" required: @@ -37431,6 +41023,57 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-vercel: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token to authenticate with the Vercel API. Create and\ + \ manage tokens in your Vercel account settings." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "vercel" + const: "vercel" + enum: + - "vercel" + order: 0 + type: "string" + source-vercel-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Access token to authenticate with the Vercel API. Create and\ + \ manage tokens in your Vercel account settings." 
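# Editor's note (illustrative sketch; not part of this diff): the source-vercel
# schema above requires access_token, start_date and sourceType, with
# start_date constrained by the pattern
# ^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$, i.e. a
# second-precision UTC timestamp. The token value below is a hypothetical
# placeholder.
vercel_example:
  sourceType: "vercel"
  access_token: "<vercel access token>"
  start_date: "2024-01-01T00:00:00Z"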
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-orbit: type: "object" required: @@ -37632,6 +41275,140 @@ components: title: "Domain name" description: "Your Confluence domain name" order: 2 + source-zoho-expense: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + data_center: + type: "string" + description: + "The domain suffix for the Zoho Expense API based on your data\ + \ center location (e.g., 'com', 'eu', 'in', etc.)" + enum: + - "com" + - "in" + - "jp" + - "ca" + - "com.cn" + - "sa" + - "com.au" + - "eu" + name: "domain" + order: 0 + title: "Data Center" + default: "com" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zoho-expense" + const: "zoho-expense" + enum: + - "zoho-expense" + order: 0 + type: "string" + source-zoho-expense-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + data_center: + type: "string" + description: + "The domain suffix for the Zoho Expense API based on your data\ + \ center location (e.g., 'com', 'eu', 'in', etc.)" + enum: + - "com" + - "in" + - "jp" + - "ca" + - "com.cn" + - "sa" + - "com.au" + - "eu" + name: "domain" + order: 0 + title: "Data Center" + default: "com" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + source-formbricks: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate and find it in your Postman\ + \ account settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "formbricks" + const: "formbricks" + enum: + - "formbricks" + order: 0 + type: "string" + source-formbricks-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate and find it in your Postman\ + \ account settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-coin-api: title: "Coin API Spec" type: "object" @@ -37806,6 +41583,105 @@ components: \ to the name of the project . 
Example: 44056" order: 1 title: "Project Id" + source-zoho-bigin: + type: "object" + required: + - "client_id" + - "data_center" + - "client_secret" + - "client_refresh_token" + - "module_name" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + description: "The data center where the Bigin account's resources are hosted" + enum: + - "com" + - "com.au" + - "eu" + - "in" + - "com.cn" + - "jp" + name: "data_center" + order: 1 + title: "Data Center" + default: "com" + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 3 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + module_name: + type: "string" + order: 4 + title: "Module Name" + sourceType: + title: "zoho-bigin" + const: "zoho-bigin" + enum: + - "zoho-bigin" + order: 0 + type: "string" + source-zoho-bigin-update: + type: "object" + required: + - "client_id" + - "data_center" + - "client_secret" + - "client_refresh_token" + - "module_name" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + data_center: + type: "string" + description: "The data center where the Bigin account's resources are hosted" + enum: + - "com" + - "com.au" + - "eu" + - "in" + - "com.cn" + - "jp" + name: "data_center" + order: 1 + title: "Data Center" + default: "com" + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 3 + title: "Refresh token" + airbyte_secret: true + module_name: + type: "string" + order: 4 + title: "Module Name" source-slack: title: "Slack Spec" type: "object" @@ -38051,6 +41927,61 @@ components: >docs for instructions on how to generate it." airbyte_secret: true order: 1 + source-tremendous: + type: "object" + required: + - "api_key" + - "environment" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate an API key through the Tremendous\ + \ dashboard under Team Settings > Developers. Save the key once you’ve\ + \ generated it." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + environment: + type: "string" + enum: + - "api" + - "testflight" + order: 1 + title: "Environment" + sourceType: + title: "tremendous" + const: "tremendous" + enum: + - "tremendous" + order: 0 + type: "string" + source-tremendous-update: + type: "object" + required: + - "api_key" + - "environment" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate an API key through the Tremendous\ + \ dashboard under Team Settings > Developers. Save the key once you’ve\ + \ generated it." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + environment: + type: "string" + enum: + - "api" + - "testflight" + order: 1 + title: "Environment" source-gainsight-px: type: "object" required: @@ -38086,6 +42017,35 @@ components: "The Aptrinsic API Key which is recieved from the dashboard\ \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" order: 0 + source-humanitix: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "humanitix" + const: "humanitix" + enum: + - "humanitix" + order: 0 + type: "string" + source-humanitix-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 source-plaid: type: "object" required: @@ -39712,6 +43672,99 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-zoho-invoice: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "region" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: "To be provided if a user belongs to multiple organizations" + order: 3 + title: "Organization ID" + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + order: 4 + title: "Region" + sourceType: + title: "zoho-invoice" + const: "zoho-invoice" + enum: + - "zoho-invoice" + order: 0 + type: "string" + source-zoho-invoice-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "region" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + organization_id: + type: "string" + description: "To be provided if a user belongs to multiple organizations" + order: 3 + title: "Organization ID" + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + order: 4 + title: "Region" source-breezy-hr: type: "object" required: @@ -40684,6 +44737,95 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-revolut-merchant: + type: "object" + required: + - "api_version" + - "secret_api_key" + - "start_date" + - "environment" + - "sourceType" + properties: + api_version: + type: "string" + description: + "Specify the API version to use. This is required for certain\ + \ API calls. Example: '2024-09-01'." + name: "api_version" + title: "API Version" + order: 0 + secret_api_key: + type: "string" + description: + "Secret API key to use for authenticating with the Revolut\ + \ Merchant API. Find it in your Revolut Business account under APIs >\ + \ Merchant API." 
+ name: "secret_api_key" + title: "Secret API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + environment: + type: "string" + description: "The base url of your environment. Either sandbox or production" + title: "environment" + enum: + - "sandbox-merchant" + - "merchant" + order: 3 + sourceType: + title: "revolut-merchant" + const: "revolut-merchant" + enum: + - "revolut-merchant" + order: 0 + type: "string" + source-revolut-merchant-update: + type: "object" + required: + - "api_version" + - "secret_api_key" + - "start_date" + - "environment" + properties: + api_version: + type: "string" + description: + "Specify the API version to use. This is required for certain\ + \ API calls. Example: '2024-09-01'." + name: "api_version" + title: "API Version" + order: 0 + secret_api_key: + type: "string" + description: + "Secret API key to use for authenticating with the Revolut\ + \ Merchant API. Find it in your Revolut Business account under APIs >\ + \ Merchant API." + name: "secret_api_key" + title: "Secret API Key" + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + environment: + type: "string" + description: "The base url of your environment. Either sandbox or production" + title: "environment" + enum: + - "sandbox-merchant" + - "merchant" + order: 3 source-hibob: type: "object" required: @@ -41335,6 +45477,23 @@ components: \ token." airbyte_secret: true x-speakeasy-param-sensitive: true + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Zendesk Support plan. More\ + \ info about the rate limit plan tiers can be found on Zendesk's API docs." + order: 3 sourceType: title: "zendesk-support" const: "zendesk-support" @@ -41437,6 +45596,23 @@ components: >full documentation for more information on generating this\ \ token." airbyte_secret: true + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Zendesk Support plan. More\ + \ info about the rate limit plan tiers can be found on Zendesk's API docs." + order: 3 source-veeqo: type: "object" required: @@ -41899,6 +46075,45 @@ components: title: "Client Secret" airbyte_secret: true order: 4 + source-spotlercrm: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access Token to authenticate API requests. Generate it by\ + \ logging into your CRM system, navigating to Settings / Integrations\ + \ / API V4, and clicking 'generate new key'." 
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "spotlercrm" + const: "spotlercrm" + enum: + - "spotlercrm" + order: 0 + type: "string" + source-spotlercrm-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + description: + "Access Token to authenticate API requests. Generate it by\ + \ logging into your CRM system, navigating to Settings / Integrations\ + \ / API V4, and clicking 'generate new key'." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true source-ashby: type: "object" required: @@ -41991,6 +46206,48 @@ components: \ Access and select API integration." airbyte_secret: true order: 0 + source-freightview: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "freightview" + const: "freightview" + enum: + - "freightview" + order: 0 + type: "string" + source-freightview-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client Secret" + airbyte_secret: true source-onesignal: type: "object" required: @@ -46688,6 +50945,43 @@ components: pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" order: 1 format: "date-time" + source-pretix: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Obtain it from the pretix web interface\ + \ by creating a new token under your team settings." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "pretix" + const: "pretix" + enum: + - "pretix" + order: 0 + type: "string" + source-pretix-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Obtain it from the pretix web interface\ + \ by creating a new token under your team settings." 
+ name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-cimis: type: "object" required: @@ -47254,6 +51548,61 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-blogger: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "blogger" + const: "blogger" + enum: + - "blogger" + order: 0 + type: "string" + source-blogger-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true source-codefresh: type: "object" required: @@ -48642,6 +52991,92 @@ components: >docs for more information on where to find that key." airbyte_secret: true order: 0 + source-web-scrapper: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://cloud.webscraper.io/api" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "web-scrapper" + const: "web-scrapper" + enum: + - "web-scrapper" + order: 0 + type: "string" + source-web-scrapper-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://cloud.webscraper.io/api" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-wufoo: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your Wufoo API Key. You can find it by logging into your Wufoo\ + \ account, selecting 'API Information' from the 'More' dropdown on any\ + \ form, and locating the 16-digit code." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "Your account subdomain/username for Wufoo." + name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "wufoo" + const: "wufoo" + enum: + - "wufoo" + order: 0 + type: "string" + source-wufoo-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: + "Your Wufoo API Key. You can find it by logging into your Wufoo\ + \ account, selecting 'API Information' from the 'More' dropdown on any\ + \ form, and locating the 16-digit code." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "Your account subdomain/username for Wufoo." + name: "subdomain" + order: 1 + title: "Subdomain" source-configcat: type: "object" required: @@ -48758,6 +53193,181 @@ components: description: "Your Insightly API token." 
airbyte_secret: true order: 1 + source-zoho-desk: + type: "object" + required: + - "client_id" + - "client_secret" + - "token_refresh_endpoint" + - "refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_refresh_endpoint: + type: "string" + name: "token_refresh_endpoint" + order: 2 + title: "Token Refresh Endpoint" + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + include_custom_domain: + type: "boolean" + order: 4 + title: "include Custom Domain" + sourceType: + title: "zoho-desk" + const: "zoho-desk" + enum: + - "zoho-desk" + order: 0 + type: "string" + source-zoho-desk-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "token_refresh_endpoint" + - "refresh_token" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + token_refresh_endpoint: + type: "string" + name: "token_refresh_endpoint" + order: 2 + title: "Token Refresh Endpoint" + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + include_custom_domain: + type: "boolean" + order: 4 + title: "include Custom Domain" + source-pipeliner: + type: "object" + required: + - "username" + - "service" + - "spaceid" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + service: + type: "string" + enum: + - "eu-central" + - "us-east" + - "ca-central" + - "ap-southeast" + order: 2 + title: "Data Center" + spaceid: + type: "string" + order: 3 + title: "Space ID" + sourceType: + title: "pipeliner" + const: "pipeliner" + enum: + - "pipeliner" + order: 0 + type: "string" + source-pipeliner-update: + type: "object" + required: + - "username" + - "service" + - "spaceid" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + service: + type: "string" + enum: + - "eu-central" + - "us-east" + - "ca-central" + - "ap-southeast" + order: 2 + title: "Data Center" + spaceid: + type: "string" + order: 3 + title: "Space ID" + source-opinion-stage: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "opinion-stage" + const: "opinion-stage" + enum: + - "opinion-stage" + order: 0 + type: "string" + source-opinion-stage-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 source-cart: title: "Cart.com Spec" type: "object" @@ -49910,89 +54520,164 @@ components: order: 1 title: "OAuth Client ID" airbyte_secret: true - x-speakeasy-param-sensitive: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + 
x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "zoho-inventory" + const: "zoho-inventory" + enum: + - "zoho-inventory" + order: 0 + type: "string" + source-zoho-inventory-update: + type: "object" + required: + - "domain" + - "client_id" + - "client_secret" + - "refresh_token" + - "start_date" + properties: + domain: + type: "string" + description: + "The domain suffix for the Zoho Inventory API based on your\ + \ data center location (e.g., 'com', 'eu', 'in', etc.)" + enum: + - "com" + - "in" + - "jp" + - "eu" + - "com.au" + - "ca" + - "com.cn" + - "sa" + name: "domain" + order: 0 + title: "Domain" + default: "com" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true client_secret: type: "string" name: "client_secret" order: 2 title: "OAuth Client Secret" airbyte_secret: true - x-speakeasy-param-sensitive: true refresh_token: type: "string" name: "refresh_token" order: 3 title: "OAuth Refresh Token" airbyte_secret: true - x-speakeasy-param-sensitive: true start_date: type: "string" order: 4 title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-salesflare: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Enter you api key like this : Bearer YOUR_API_KEY" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true sourceType: - title: "zoho-inventory" - const: "zoho-inventory" + title: "salesflare" + const: "salesflare" enum: - - "zoho-inventory" + - "salesflare" order: 0 type: "string" - source-zoho-inventory-update: + source-salesflare-update: type: "object" required: - - "domain" - - "client_id" - - "client_secret" - - "refresh_token" + - "api_key" + properties: + api_key: + type: "string" + description: "Enter you api key like this : Bearer YOUR_API_KEY" + order: 0 + title: "API Key" + airbyte_secret: true + source-brevo: + type: "object" + required: + - "api_key" - "start_date" + - "sourceType" properties: - domain: + api_key: type: "string" - description: - "The domain suffix for the Zoho Inventory API based on your\ - \ data center location (e.g., 'com', 'eu', 'in', etc.)" - enum: - - "com" - - "in" - - "jp" - - "eu" - - "com.au" - - "ca" - - "com.cn" - - "sa" - name: "domain" order: 0 - title: "Domain" - default: "com" - client_id: + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: type: "string" - name: "client_id" order: 1 - title: "OAuth Client ID" - airbyte_secret: true - client_secret: + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "brevo" + const: "brevo" + enum: + - "brevo" + order: 0 type: "string" - name: "client_secret" - order: 2 - title: "OAuth Client Secret" - airbyte_secret: true - refresh_token: + source-brevo-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: type: "string" - name: "refresh_token" - order: 3 - title: "OAuth Refresh Token" + order: 0 + title: "API Key" airbyte_secret: true start_date: type: "string" - order: 4 + order: 1 title: "Start date" format: 
"date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - source-brevo: + source-finage: type: "object" required: - "api_key" + - "symbols" - "start_date" - "sourceType" properties: @@ -50002,23 +54687,84 @@ components: title: "API Key" airbyte_secret: true x-speakeasy-param-sensitive: true + symbols: + type: "array" + description: "List of symbols " + order: 1 + title: "Symbols" + tech_indicator_type: + type: "string" + description: "One of DEMA, EMA, SMA, WMA, RSI, TEMA, Williams, ADX " + enum: + - "DEMA" + - "EMA" + - "SMA" + - "WMA" + - "RSI" + - "TEMA" + - "Williams" + - "ADX" + order: 2 + title: "Technical Indicator Type" + default: "SMA" + time: + type: "string" + enum: + - "daily" + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + order: 3 + title: "Time Interval" + default: "daily" + period: + type: "string" + description: "Time period. Default is 10" + order: 4 + title: "Period" + time_aggregates: + type: "string" + description: "Size of the time" + enum: + - "minute" + - "hour" + - "day" + - "week" + - "month" + - "quarter" + - "year" + order: 5 + title: "Time aggregates" + default: "day" + time_period: + type: "string" + description: "Time Period for cash flow stmts" + enum: + - "annual" + - "quarter" + order: 6 + title: "Time Period" start_date: type: "string" - order: 1 title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 7 sourceType: - title: "brevo" - const: "brevo" + title: "finage" + const: "finage" enum: - - "brevo" + - "finage" order: 0 type: "string" - source-brevo-update: + source-finage-update: type: "object" required: - "api_key" + - "symbols" - "start_date" properties: api_key: @@ -50026,12 +54772,72 @@ components: order: 0 title: "API Key" airbyte_secret: true + symbols: + type: "array" + description: "List of symbols " + order: 1 + title: "Symbols" + tech_indicator_type: + type: "string" + description: "One of DEMA, EMA, SMA, WMA, RSI, TEMA, Williams, ADX " + enum: + - "DEMA" + - "EMA" + - "SMA" + - "WMA" + - "RSI" + - "TEMA" + - "Williams" + - "ADX" + order: 2 + title: "Technical Indicator Type" + default: "SMA" + time: + type: "string" + enum: + - "daily" + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + order: 3 + title: "Time Interval" + default: "daily" + period: + type: "string" + description: "Time period. 
Default is 10" + order: 4 + title: "Period" + time_aggregates: + type: "string" + description: "Size of the time" + enum: + - "minute" + - "hour" + - "day" + - "week" + - "month" + - "quarter" + - "year" + order: 5 + title: "Time aggregates" + default: "day" + time_period: + type: "string" + description: "Time Period for cash flow stmts" + enum: + - "annual" + - "quarter" + order: 6 + title: "Time Period" start_date: type: "string" - order: 1 title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 7 source-datascope: type: "object" required: @@ -50667,6 +55473,45 @@ components: - "1day" - "1week" - "1month" + source-smartreach: + type: "object" + required: + - "api_key" + - "teamid" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + teamid: + type: "number" + title: "TeamID" + order: 1 + sourceType: + title: "smartreach" + const: "smartreach" + enum: + - "smartreach" + order: 0 + type: "string" + source-smartreach-update: + type: "object" + required: + - "api_key" + - "teamid" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + teamid: + type: "number" + title: "TeamID" + order: 1 source-monday: title: "Monday Spec" type: "object" @@ -50805,6 +55650,170 @@ components: title: "Personal API Token" description: "API Token for making authenticated requests." airbyte_secret: true + source-onepagecrm: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + description: "Enter the user ID of your API app" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API Key of your API app" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "onepagecrm" + const: "onepagecrm" + enum: + - "onepagecrm" + order: 0 + type: "string" + source-onepagecrm-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + description: "Enter the user ID of your API app" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API Key of your API app" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + source-financial-modelling: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + exchange: + type: "string" + description: + "The stock exchange : AMEX, AMS, AQS, ASX, ATH, BER, BME, BRU,\ + \ BSE, BUD, BUE, BVC, CAI, CBOE, CNQ, CPH, DFM, DOH, DUS, DXE, EGX, EURONEXT,\ + \ HAM, HEL, HKSE, ICE, IOB, IST, JKT, JNB, JPX, KLS, KOE, KSC, KUW, LSE,\ + \ MCX, MEX, MIL, MUN, NASDAQ, NEO, NSE, NYSE, NZE, OEM, OQX, OSL, OTC,\ + \ PNK, PRA, RIS, SAO, SAU, SES, SET, SGO, SHH, SHZ, SIX, STO, STU, TAI,\ + \ TLV, TSX, TSXV, TWO, VIE, VSE, WSE, XETRA" + order: 1 + title: "Exchange" + default: "NASDAQ" + marketcapmorethan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap more\ + \ than the give marketcap" + order: 2 + title: "Market Cap More Than" + marketcaplowerthan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap lower\ + \ than the give marketcap" + order: 3 + title: "Market Cap Lower Than" + time_frame: + type: "string" + description: "For 
example 1min, 5min, 15min, 30min, 1hour, 4hour" + order: 4 + title: "Time Frame" + default: "4hour" + enum: + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + sourceType: + title: "financial-modelling" + const: "financial-modelling" + enum: + - "financial-modelling" + order: 0 + type: "string" + source-financial-modelling-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + exchange: + type: "string" + description: + "The stock exchange : AMEX, AMS, AQS, ASX, ATH, BER, BME, BRU,\ + \ BSE, BUD, BUE, BVC, CAI, CBOE, CNQ, CPH, DFM, DOH, DUS, DXE, EGX, EURONEXT,\ + \ HAM, HEL, HKSE, ICE, IOB, IST, JKT, JNB, JPX, KLS, KOE, KSC, KUW, LSE,\ + \ MCX, MEX, MIL, MUN, NASDAQ, NEO, NSE, NYSE, NZE, OEM, OQX, OSL, OTC,\ + \ PNK, PRA, RIS, SAO, SAU, SES, SET, SGO, SHH, SHZ, SIX, STO, STU, TAI,\ + \ TLV, TSX, TSXV, TWO, VIE, VSE, WSE, XETRA" + order: 1 + title: "Exchange" + default: "NASDAQ" + marketcapmorethan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap more\ + \ than the give marketcap" + order: 2 + title: "Market Cap More Than" + marketcaplowerthan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap lower\ + \ than the give marketcap" + order: 3 + title: "Market Cap Lower Than" + time_frame: + type: "string" + description: "For example 1min, 5min, 15min, 30min, 1hour, 4hour" + order: 4 + title: "Time Frame" + default: "4hour" + enum: + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 source-waiteraid: type: "object" required: @@ -51209,6 +56218,81 @@ components: description: "Identification token for app accessing data" airbyte_secret: true order: 1 + source-invoiceninja: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "invoiceninja" + const: "invoiceninja" + enum: + - "invoiceninja" + order: 0 + type: "string" + source-invoiceninja-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + source-sendpulse: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "sendpulse" + const: "sendpulse" + enum: + - "sendpulse" + order: 0 + type: "string" + source-sendpulse-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true source-bigquery: title: "BigQuery Source Spec" type: "object" @@ 
-51485,6 +56569,39 @@ components: >here." airbyte_secret: true order: 0 + source-tickettailor: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://www.getdrip.com/user/edit" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "tickettailor" + const: "tickettailor" + enum: + - "tickettailor" + order: 0 + type: "string" + source-tickettailor-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://www.getdrip.com/user/edit" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-calendly: type: "object" required: @@ -51621,6 +56738,72 @@ components: type: "string" title: "Engine" description: "Engine name to connect to." + source-eventee: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it at https://admin.eventee.co/\ + \ in 'Settings -> Features'." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "eventee" + const: "eventee" + enum: + - "eventee" + order: 0 + type: "string" + source-eventee-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it at https://admin.eventee.co/\ + \ in 'Settings -> Features'." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-simfin: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "simfin" + const: "simfin" + enum: + - "simfin" + order: 0 + type: "string" + source-simfin-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true source-pivotal-tracker: title: "Pivotal Tracker Spec" type: "object" @@ -51703,6 +56886,127 @@ components: description: "Date from when the sync should start in epoch Unix timestamp" order: 2 title: "Start Date" + source-brex: + type: "object" + required: + - "user_token" + - "start_date" + - "sourceType" + properties: + user_token: + type: "string" + description: + "User token to authenticate API requests. Generate it from\ + \ your Brex dashboard under Developer > Settings." + name: "user_token" + title: "User Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "brex" + const: "brex" + enum: + - "brex" + order: 0 + type: "string" + source-brex-update: + type: "object" + required: + - "user_token" + - "start_date" + properties: + user_token: + type: "string" + description: + "User token to authenticate API requests. Generate it from\ + \ your Brex dashboard under Developer > Settings." 
+ name: "user_token" + title: "User Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-fulcrum: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://web.fulcrumapp.com/settings/api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "fulcrum" + const: "fulcrum" + enum: + - "fulcrum" + order: 0 + type: "string" + source-fulcrum-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://web.fulcrumapp.com/settings/api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-bigmailer: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can create and find it on the API key\ + \ management page in your BigMailer account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "bigmailer" + const: "bigmailer" + enum: + - "bigmailer" + order: 0 + type: "string" + source-bigmailer-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. You can create and find it on the API key\ + \ management page in your BigMailer account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-senseforce: type: "object" required: @@ -53420,6 +58724,21 @@ components: description: "The Client Secret of your Drift developer application." airbyte_secret: true title: null + snapchat-marketing: + properties: + client_id: + type: "string" + description: "The Client ID of your Snapchat developer application." + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret of your Snapchat developer application." + order: 1 + title: "Client Secret" + airbyte_secret: true + title: null gitlab: properties: credentials: @@ -53611,9 +58930,9 @@ components: "The client ID of your Amazon Ads developer application. See\ \ the docs for more information." + airbyte_secret: true order: 1 type: "string" - airbyte_secret: true client_secret: title: "Client Secret" description: @@ -53623,7 +58942,7 @@ components: airbyte_secret: true order: 2 type: "string" - title: "Amazon Ads Spec" + title: "Source Amazon Ads" github: properties: credentials: @@ -53773,20 +59092,6 @@ components: >here." airbyte_secret: true title: "Google Search Console Spec" - retently: - properties: - credentials: - properties: - client_id: - title: "Client ID" - type: "string" - description: "The Client ID of your Retently developer application." - client_secret: - title: "Client Secret" - type: "string" - description: "The Client Secret of your Retently developer application." - airbyte_secret: true - title: "Retently Api Spec" instagram: properties: client_id: @@ -53802,21 +59107,6 @@ components: airbyte_hidden: true type: "string" title: "Source Instagram" - zendesk-sunshine: - properties: - credentials: - properties: - client_id: - type: "string" - title: "Client ID" - description: "The Client ID of your OAuth application." 
- airbyte_secret: true - client_secret: - type: "string" - title: "Client Secret" - description: "The Client Secret of your OAuth application." - airbyte_secret: true - title: null snowflake: properties: credentials: @@ -67535,29 +72825,34 @@ components: - "destinationType" properties: motherduck_api_key: - title: "MotherDuck API Key" + title: "MotherDuck Access Token" type: "string" - description: "API key to use for authentication to a MotherDuck database." + description: + "API access token to use for authentication to a MotherDuck\ + \ database." airbyte_secret: true x-speakeasy-param-sensitive: true destination_path: title: "Destination DB" type: "string" description: - "Path to the .duckdb file, or the text 'md:' to connect to\ - \ MotherDuck. The file will be placed inside that local mount. For more\ - \ information check out our docs" + "Path to a .duckdb file or 'md:' to connect\ + \ to a MotherDuck database. If 'md:' is specified without a database name,\ + \ the default MotherDuck database name ('my_db') will be used." examples: - "/local/destination.duckdb" - "md:" - - "motherduck:" + - "md:data_db" + - "md:my_db" default: "md:" schema: - title: "Destination Schema" + title: "Schema Name" type: "string" - description: "Database schema name, default for duckdb is 'main'." - example: "main" + description: "Database schema name, defaults to 'main' if not specified." + examples: + - "main" + - "airbyte_raw" + - "my_schema" destinationType: title: "motherduck" const: "motherduck" @@ -67572,28 +72867,33 @@ components: - "motherduck_api_key" properties: motherduck_api_key: - title: "MotherDuck API Key" + title: "MotherDuck Access Token" type: "string" - description: "API key to use for authentication to a MotherDuck database." + description: + "API access token to use for authentication to a MotherDuck\ + \ database." airbyte_secret: true destination_path: title: "Destination DB" type: "string" description: - "Path to the .duckdb file, or the text 'md:' to connect to\ - \ MotherDuck. The file will be placed inside that local mount. For more\ - \ information check out our docs" + "Path to a .duckdb file or 'md:' to connect\ + \ to a MotherDuck database. If 'md:' is specified without a database name,\ + \ the default MotherDuck database name ('my_db') will be used." examples: - "/local/destination.duckdb" - "md:" - - "motherduck:" + - "md:data_db" + - "md:my_db" default: "md:" schema: - title: "Destination Schema" + title: "Schema Name" type: "string" - description: "Database schema name, default for duckdb is 'main'." - example: "main" + description: "Database schema name, defaults to 'main' if not specified." 
+ examples: + - "main" + - "airbyte_raw" + - "my_schema" destination-s3: title: "S3 Destination Spec" type: "object" @@ -72542,10 +77842,10 @@ components: - notion - pinterest - rd-station-marketing - - retently - salesforce - slack - smartsheets + - snapchat-marketing - snowflake - surveymonkey - tiktok-marketing @@ -72553,7 +77853,6 @@ components: - typeform - youtube-analytics - zendesk-chat - - zendesk-sunshine - zendesk-support - zendesk-talk SourceConfiguration: @@ -72566,12 +77865,16 @@ components: $ref: "#/components/schemas/source-7shifts" - title: source-activecampaign $ref: "#/components/schemas/source-activecampaign" + - title: source-agilecrm + $ref: "#/components/schemas/source-agilecrm" - title: source-airbyte $ref: "#/components/schemas/source-airbyte" - title: source-aircall $ref: "#/components/schemas/source-aircall" - title: source-airtable $ref: "#/components/schemas/source-airtable" + - title: source-akeneo + $ref: "#/components/schemas/source-akeneo" - title: source-algolia $ref: "#/components/schemas/source-algolia" - title: source-amazon-ads @@ -72592,6 +77895,8 @@ components: $ref: "#/components/schemas/source-appfollow" - title: source-apple-search-ads $ref: "#/components/schemas/source-apple-search-ads" + - title: source-apptivo + $ref: "#/components/schemas/source-apptivo" - title: source-asana $ref: "#/components/schemas/source-asana" - title: source-ashby @@ -72610,12 +77915,18 @@ components: $ref: "#/components/schemas/source-basecamp" - title: source-beamer $ref: "#/components/schemas/source-beamer" + - title: source-bigmailer + $ref: "#/components/schemas/source-bigmailer" - title: source-bigquery $ref: "#/components/schemas/source-bigquery" - title: source-bing-ads $ref: "#/components/schemas/source-bing-ads" - title: source-bitly $ref: "#/components/schemas/source-bitly" + - title: source-blogger + $ref: "#/components/schemas/source-blogger" + - title: source-box + $ref: "#/components/schemas/source-box" - title: source-braintree $ref: "#/components/schemas/source-braintree" - title: source-braze @@ -72624,20 +77935,30 @@ components: $ref: "#/components/schemas/source-breezy-hr" - title: source-brevo $ref: "#/components/schemas/source-brevo" + - title: source-brex + $ref: "#/components/schemas/source-brex" - title: source-bugsnag $ref: "#/components/schemas/source-bugsnag" - title: source-buildkite $ref: "#/components/schemas/source-buildkite" + - title: source-bunny-inc + $ref: "#/components/schemas/source-bunny-inc" - title: source-buzzsprout $ref: "#/components/schemas/source-buzzsprout" + - title: source-cal-com + $ref: "#/components/schemas/source-cal-com" - title: source-calendly $ref: "#/components/schemas/source-calendly" - title: source-callrail $ref: "#/components/schemas/source-callrail" - title: source-campaign-monitor $ref: "#/components/schemas/source-campaign-monitor" + - title: source-campayn + $ref: "#/components/schemas/source-campayn" - title: source-canny $ref: "#/components/schemas/source-canny" + - title: source-capsule-crm + $ref: "#/components/schemas/source-capsule-crm" - title: source-care-quality-commission $ref: "#/components/schemas/source-care-quality-commission" - title: source-cart @@ -72656,6 +77977,8 @@ components: $ref: "#/components/schemas/source-chartmogul" - title: source-cimis $ref: "#/components/schemas/source-cimis" + - title: source-cin7 + $ref: "#/components/schemas/source-cin7" - title: source-circa $ref: "#/components/schemas/source-circa" - title: source-circleci @@ -72670,8 +77993,12 @@ components: $ref: 
"#/components/schemas/source-clickup-api" - title: source-clockify $ref: "#/components/schemas/source-clockify" + - title: source-clockodo + $ref: "#/components/schemas/source-clockodo" - title: source-close-com $ref: "#/components/schemas/source-close-com" + - title: source-cloudbeds + $ref: "#/components/schemas/source-cloudbeds" - title: source-coassemble $ref: "#/components/schemas/source-coassemble" - title: source-coda @@ -72706,6 +78033,8 @@ components: $ref: "#/components/schemas/source-dbt" - title: source-delighted $ref: "#/components/schemas/source-delighted" + - title: source-deputy + $ref: "#/components/schemas/source-deputy" - title: source-dixa $ref: "#/components/schemas/source-dixa" - title: source-dockerhub @@ -72720,16 +78049,26 @@ components: $ref: "#/components/schemas/source-dropbox-sign" - title: source-dynamodb $ref: "#/components/schemas/source-dynamodb" + - title: source-e-conomic + $ref: "#/components/schemas/source-e-conomic" - title: source-easypost $ref: "#/components/schemas/source-easypost" - title: source-easypromos $ref: "#/components/schemas/source-easypromos" + - title: source-elasticemail + $ref: "#/components/schemas/source-elasticemail" - title: source-emailoctopus $ref: "#/components/schemas/source-emailoctopus" - title: source-employment-hero $ref: "#/components/schemas/source-employment-hero" + - title: source-encharge + $ref: "#/components/schemas/source-encharge" - title: source-eventbrite $ref: "#/components/schemas/source-eventbrite" + - title: source-eventee + $ref: "#/components/schemas/source-eventee" + - title: source-eventzilla + $ref: "#/components/schemas/source-eventzilla" - title: source-exchange-rates $ref: "#/components/schemas/source-exchange-rates" - title: source-ezofficeinventory @@ -72744,16 +78083,38 @@ components: $ref: "#/components/schemas/source-fauna" - title: source-file $ref: "#/components/schemas/source-file" + - title: source-fillout + $ref: "#/components/schemas/source-fillout" + - title: source-finage + $ref: "#/components/schemas/source-finage" + - title: source-financial-modelling + $ref: "#/components/schemas/source-financial-modelling" + - title: source-finnhub + $ref: "#/components/schemas/source-finnhub" + - title: source-finnworlds + $ref: "#/components/schemas/source-finnworlds" - title: source-firebolt $ref: "#/components/schemas/source-firebolt" + - title: source-firehydrant + $ref: "#/components/schemas/source-firehydrant" - title: source-fleetio $ref: "#/components/schemas/source-fleetio" + - title: source-flexmail + $ref: "#/components/schemas/source-flexmail" - title: source-flexport $ref: "#/components/schemas/source-flexport" - title: source-float $ref: "#/components/schemas/source-float" + - title: source-flowlu + $ref: "#/components/schemas/source-flowlu" + - title: source-formbricks + $ref: "#/components/schemas/source-formbricks" - title: source-free-agent-connector $ref: "#/components/schemas/source-free-agent-connector" + - title: source-freightview + $ref: "#/components/schemas/source-freightview" + - title: source-freshbooks + $ref: "#/components/schemas/source-freshbooks" - title: source-freshcaller $ref: "#/components/schemas/source-freshcaller" - title: source-freshchat @@ -72766,12 +78127,18 @@ components: $ref: "#/components/schemas/source-freshservice" - title: source-front $ref: "#/components/schemas/source-front" + - title: source-fulcrum + $ref: "#/components/schemas/source-fulcrum" - title: source-gainsight-px $ref: "#/components/schemas/source-gainsight-px" - title: source-gcs $ref: 
"#/components/schemas/source-gcs" + - title: source-getgist + $ref: "#/components/schemas/source-getgist" - title: source-getlago $ref: "#/components/schemas/source-getlago" + - title: source-gitbook + $ref: "#/components/schemas/source-gitbook" - title: source-github $ref: "#/components/schemas/source-github" - title: source-gitlab @@ -72794,10 +78161,14 @@ components: $ref: "#/components/schemas/source-google-analytics-data-api" - title: source-google-calendar $ref: "#/components/schemas/source-google-calendar" + - title: source-google-classroom + $ref: "#/components/schemas/source-google-classroom" - title: source-google-directory $ref: "#/components/schemas/source-google-directory" - title: source-google-drive $ref: "#/components/schemas/source-google-drive" + - title: source-google-forms + $ref: "#/components/schemas/source-google-forms" - title: source-google-pagespeed-insights $ref: "#/components/schemas/source-google-pagespeed-insights" - title: source-google-search-console @@ -72832,10 +78203,14 @@ components: $ref: "#/components/schemas/source-hubplanner" - title: source-hubspot $ref: "#/components/schemas/source-hubspot" + - title: source-humanitix + $ref: "#/components/schemas/source-humanitix" - title: source-illumina-basespace $ref: "#/components/schemas/source-illumina-basespace" - title: source-incident-io $ref: "#/components/schemas/source-incident-io" + - title: source-inflowinventory + $ref: "#/components/schemas/source-inflowinventory" - title: source-insightly $ref: "#/components/schemas/source-insightly" - title: source-instagram @@ -72846,12 +78221,16 @@ components: $ref: "#/components/schemas/source-intercom" - title: source-invoiced $ref: "#/components/schemas/source-invoiced" + - title: source-invoiceninja + $ref: "#/components/schemas/source-invoiceninja" - title: source-ip2whois $ref: "#/components/schemas/source-ip2whois" - title: source-iterable $ref: "#/components/schemas/source-iterable" - title: source-jira $ref: "#/components/schemas/source-jira" + - title: source-jobnimbus + $ref: "#/components/schemas/source-jobnimbus" - title: source-jotform $ref: "#/components/schemas/source-jotform" - title: source-just-sift @@ -72880,8 +78259,12 @@ components: $ref: "#/components/schemas/source-leadfeeder" - title: source-lemlist $ref: "#/components/schemas/source-lemlist" + - title: source-less-annoying-crm + $ref: "#/components/schemas/source-less-annoying-crm" - title: source-lever-hiring $ref: "#/components/schemas/source-lever-hiring" + - title: source-lightspeed-retail + $ref: "#/components/schemas/source-lightspeed-retail" - title: source-linkedin-ads $ref: "#/components/schemas/source-linkedin-ads" - title: source-linkedin-pages @@ -72906,8 +78289,14 @@ components: $ref: "#/components/schemas/source-mailjet-mail" - title: source-mailjet-sms $ref: "#/components/schemas/source-mailjet-sms" + - title: source-mailosaur + $ref: "#/components/schemas/source-mailosaur" + - title: source-mailtrap + $ref: "#/components/schemas/source-mailtrap" - title: source-marketo $ref: "#/components/schemas/source-marketo" + - title: source-marketstack + $ref: "#/components/schemas/source-marketstack" - title: source-mention $ref: "#/components/schemas/source-mention" - title: source-metabase @@ -72954,10 +78343,16 @@ components: $ref: "#/components/schemas/source-netsuite" - title: source-news-api $ref: "#/components/schemas/source-news-api" + - title: source-newsdata-io + $ref: "#/components/schemas/source-newsdata-io" + - title: source-nocrm + $ref: 
"#/components/schemas/source-nocrm" - title: source-northpass-lms $ref: "#/components/schemas/source-northpass-lms" - title: source-notion $ref: "#/components/schemas/source-notion" + - title: source-nutshell + $ref: "#/components/schemas/source-nutshell" - title: source-nylas $ref: "#/components/schemas/source-nylas" - title: source-nytimes @@ -72966,12 +78361,24 @@ components: $ref: "#/components/schemas/source-okta" - title: source-omnisend $ref: "#/components/schemas/source-omnisend" + - title: source-oncehub + $ref: "#/components/schemas/source-oncehub" + - title: source-onepagecrm + $ref: "#/components/schemas/source-onepagecrm" - title: source-onesignal $ref: "#/components/schemas/source-onesignal" + - title: source-onfleet + $ref: "#/components/schemas/source-onfleet" - title: source-open-data-dc $ref: "#/components/schemas/source-open-data-dc" + - title: source-openaq + $ref: "#/components/schemas/source-openaq" + - title: source-openfda + $ref: "#/components/schemas/source-openfda" - title: source-openweather $ref: "#/components/schemas/source-openweather" + - title: source-opinion-stage + $ref: "#/components/schemas/source-opinion-stage" - title: source-opsgenie $ref: "#/components/schemas/source-opsgenie" - title: source-oracle @@ -72986,8 +78393,16 @@ components: $ref: "#/components/schemas/source-outbrain-amplify" - title: source-outreach $ref: "#/components/schemas/source-outreach" + - title: source-oveit + $ref: "#/components/schemas/source-oveit" + - title: source-pabbly-subscriptions-billing + $ref: "#/components/schemas/source-pabbly-subscriptions-billing" - title: source-pandadoc $ref: "#/components/schemas/source-pandadoc" + - title: source-paperform + $ref: "#/components/schemas/source-paperform" + - title: source-papersign + $ref: "#/components/schemas/source-papersign" - title: source-pardot $ref: "#/components/schemas/source-pardot" - title: source-paypal-transaction @@ -73010,6 +78425,8 @@ components: $ref: "#/components/schemas/source-pinterest" - title: source-pipedrive $ref: "#/components/schemas/source-pipedrive" + - title: source-pipeliner + $ref: "#/components/schemas/source-pipeliner" - title: source-pivotal-tracker $ref: "#/components/schemas/source-pivotal-tracker" - title: source-piwik @@ -73034,6 +78451,8 @@ components: $ref: "#/components/schemas/source-postmarkapp" - title: source-prestashop $ref: "#/components/schemas/source-prestashop" + - title: source-pretix + $ref: "#/components/schemas/source-pretix" - title: source-primetric $ref: "#/components/schemas/source-primetric" - title: source-productboard @@ -73066,26 +78485,36 @@ components: $ref: "#/components/schemas/source-referralhero" - title: source-rentcast $ref: "#/components/schemas/source-rentcast" + - title: source-repairshopr + $ref: "#/components/schemas/source-repairshopr" - title: source-reply-io $ref: "#/components/schemas/source-reply-io" - title: source-retently $ref: "#/components/schemas/source-retently" - title: source-revenuecat $ref: "#/components/schemas/source-revenuecat" + - title: source-revolut-merchant + $ref: "#/components/schemas/source-revolut-merchant" - title: source-rki-covid $ref: "#/components/schemas/source-rki-covid" + - title: source-rocketlane + $ref: "#/components/schemas/source-rocketlane" - title: source-rollbar $ref: "#/components/schemas/source-rollbar" - title: source-rootly $ref: "#/components/schemas/source-rootly" - title: source-rss $ref: "#/components/schemas/source-rss" + - title: source-ruddr + $ref: "#/components/schemas/source-ruddr" - title: 
source-s3 $ref: "#/components/schemas/source-s3" - title: source-safetyculture $ref: "#/components/schemas/source-safetyculture" - title: source-sage-hr $ref: "#/components/schemas/source-sage-hr" + - title: source-salesflare + $ref: "#/components/schemas/source-salesflare" - title: source-salesforce $ref: "#/components/schemas/source-salesforce" - title: source-salesloft @@ -73104,6 +78533,10 @@ components: $ref: "#/components/schemas/source-sendgrid" - title: source-sendinblue $ref: "#/components/schemas/source-sendinblue" + - title: source-sendowl + $ref: "#/components/schemas/source-sendowl" + - title: source-sendpulse + $ref: "#/components/schemas/source-sendpulse" - title: source-senseforce $ref: "#/components/schemas/source-senseforce" - title: source-sentry @@ -73114,6 +78547,8 @@ components: $ref: "#/components/schemas/source-sftp-bulk" - title: source-sharetribe $ref: "#/components/schemas/source-sharetribe" + - title: source-shippo + $ref: "#/components/schemas/source-shippo" - title: source-shopify $ref: "#/components/schemas/source-shopify" - title: source-shortcut @@ -73122,6 +78557,8 @@ components: $ref: "#/components/schemas/source-shortio" - title: source-sigma-computing $ref: "#/components/schemas/source-sigma-computing" + - title: source-simfin + $ref: "#/components/schemas/source-simfin" - title: source-simplecast $ref: "#/components/schemas/source-simplecast" - title: source-simplesat @@ -73132,6 +78569,8 @@ components: $ref: "#/components/schemas/source-smaily" - title: source-smartengage $ref: "#/components/schemas/source-smartengage" + - title: source-smartreach + $ref: "#/components/schemas/source-smartreach" - title: source-smartsheets $ref: "#/components/schemas/source-smartsheets" - title: source-smartwaiver @@ -73150,6 +78589,8 @@ components: $ref: "#/components/schemas/source-sparkpost" - title: source-split-io $ref: "#/components/schemas/source-split-io" + - title: source-spotlercrm + $ref: "#/components/schemas/source-spotlercrm" - title: source-square $ref: "#/components/schemas/source-square" - title: source-squarespace @@ -73158,6 +78599,8 @@ components: $ref: "#/components/schemas/source-statsig" - title: source-statuspage $ref: "#/components/schemas/source-statuspage" + - title: source-stockdata + $ref: "#/components/schemas/source-stockdata" - title: source-strava $ref: "#/components/schemas/source-strava" - title: source-stripe @@ -73168,6 +78611,10 @@ components: $ref: "#/components/schemas/source-surveymonkey" - title: source-survicate $ref: "#/components/schemas/source-survicate" + - title: source-systeme + $ref: "#/components/schemas/source-systeme" + - title: source-taboola + $ref: "#/components/schemas/source-taboola" - title: source-teamtailor $ref: "#/components/schemas/source-teamtailor" - title: source-teamwork @@ -73182,14 +78629,22 @@ components: $ref: "#/components/schemas/source-thinkific" - title: source-ticketmaster $ref: "#/components/schemas/source-ticketmaster" + - title: source-tickettailor + $ref: "#/components/schemas/source-tickettailor" - title: source-tiktok-marketing $ref: "#/components/schemas/source-tiktok-marketing" - title: source-timely $ref: "#/components/schemas/source-timely" + - title: source-tinyemail + $ref: "#/components/schemas/source-tinyemail" - title: source-todoist $ref: "#/components/schemas/source-todoist" + - title: source-track-pms + $ref: "#/components/schemas/source-track-pms" - title: source-trello $ref: "#/components/schemas/source-trello" + - title: source-tremendous + $ref: 
"#/components/schemas/source-tremendous" - title: source-trustpilot $ref: "#/components/schemas/source-trustpilot" - title: source-tvmaze-schedule @@ -73204,6 +78659,8 @@ components: $ref: "#/components/schemas/source-twitter" - title: source-typeform $ref: "#/components/schemas/source-typeform" + - title: source-ubidots + $ref: "#/components/schemas/source-ubidots" - title: source-unleash $ref: "#/components/schemas/source-unleash" - title: source-uppromote @@ -73216,6 +78673,8 @@ components: $ref: "#/components/schemas/source-vantage" - title: source-veeqo $ref: "#/components/schemas/source-veeqo" + - title: source-vercel + $ref: "#/components/schemas/source-vercel" - title: source-visma-economic $ref: "#/components/schemas/source-visma-economic" - title: source-vitally @@ -73228,6 +78687,8 @@ components: $ref: "#/components/schemas/source-wasabi-stats-api" - title: source-weatherstack $ref: "#/components/schemas/source-weatherstack" + - title: source-web-scrapper + $ref: "#/components/schemas/source-web-scrapper" - title: source-webflow $ref: "#/components/schemas/source-webflow" - title: source-when-i-work @@ -73248,6 +78709,8 @@ components: $ref: "#/components/schemas/source-workramp" - title: source-wrike $ref: "#/components/schemas/source-wrike" + - title: source-wufoo + $ref: "#/components/schemas/source-wufoo" - title: source-xkcd $ref: "#/components/schemas/source-xkcd" - title: source-xsolla @@ -73262,6 +78725,8 @@ components: $ref: "#/components/schemas/source-you-need-a-budget-ynab" - title: source-youtube-analytics $ref: "#/components/schemas/source-youtube-analytics" + - title: source-youtube-data + $ref: "#/components/schemas/source-youtube-data" - title: source-zapier-supported-storage $ref: "#/components/schemas/source-zapier-supported-storage" - title: source-zendesk-chat @@ -73276,12 +78741,28 @@ components: $ref: "#/components/schemas/source-zenefits" - title: source-zenloop $ref: "#/components/schemas/source-zenloop" + - title: source-zoho-analytics-metadata-api + $ref: "#/components/schemas/source-zoho-analytics-metadata-api" + - title: source-zoho-bigin + $ref: "#/components/schemas/source-zoho-bigin" + - title: source-zoho-billing + $ref: "#/components/schemas/source-zoho-billing" - title: source-zoho-books $ref: "#/components/schemas/source-zoho-books" + - title: source-zoho-campaign + $ref: "#/components/schemas/source-zoho-campaign" - title: source-zoho-crm $ref: "#/components/schemas/source-zoho-crm" + - title: source-zoho-desk + $ref: "#/components/schemas/source-zoho-desk" + - title: source-zoho-expense + $ref: "#/components/schemas/source-zoho-expense" - title: source-zoho-inventory $ref: "#/components/schemas/source-zoho-inventory" + - title: source-zoho-invoice + $ref: "#/components/schemas/source-zoho-invoice" + - title: source-zonka-feedback + $ref: "#/components/schemas/source-zonka-feedback" - title: source-zoom $ref: "#/components/schemas/source-zoom" DestinationConfiguration: @@ -73430,8 +78911,6 @@ components: $ref: "#/components/schemas/pinterest" - title: rd-station-marketing $ref: "#/components/schemas/rd-station-marketing" - - title: retently - $ref: "#/components/schemas/retently" - title: salesforce $ref: "#/components/schemas/salesforce" - title: shopify @@ -73440,6 +78919,8 @@ components: $ref: "#/components/schemas/slack" - title: smartsheets $ref: "#/components/schemas/smartsheets" + - title: snapchat-marketing + $ref: "#/components/schemas/snapchat-marketing" - title: snowflake $ref: "#/components/schemas/snowflake" - title: surveymonkey 
@@ -73454,8 +78935,6 @@ components: $ref: "#/components/schemas/youtube-analytics" - title: zendesk-chat $ref: "#/components/schemas/zendesk-chat" - - title: zendesk-sunshine - $ref: "#/components/schemas/zendesk-sunshine" - title: zendesk-support $ref: "#/components/schemas/zendesk-support" - title: zendesk-talk diff --git a/airbyte-api/server-api/src/main/openapi/api_terraform.yaml b/airbyte-api/server-api/src/main/openapi/api_terraform.yaml index 2844dc06c92..f69b38ccf5a 100644 --- a/airbyte-api/server-api/src/main/openapi/api_terraform.yaml +++ b/airbyte-api/server-api/src/main/openapi/api_terraform.yaml @@ -2,7 +2,7 @@ openapi: "3.1.0" info: title: "airbyte-api" version: "1.0.0" - description: "Programatically control Airbyte Cloud, OSS & Enterprise." + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." servers: - url: "https://api.airbyte.com/v1" description: "Airbyte API v1" @@ -1627,6 +1627,92 @@ paths: type: "string" in: "path" required: true + /sources#Agilecrm: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAgilecrmCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAgilecrm" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Agilecrm#create + /sources/{sourceId}#Agilecrm: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAgilecrm" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Agilecrm#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAgilecrmPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAgilecrm" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Agilecrm#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAgilecrm" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Agilecrm#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Aha: post: requestBody: @@ -1971,6 +2057,92 @@ paths: type: "string" in: "path" required: true + /sources#Akeneo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAkeneoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAkeneo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Akeneo#create + /sources/{sourceId}#Akeneo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAkeneo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Akeneo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAkeneoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAkeneo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Akeneo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAkeneo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Akeneo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Algolia: post: requestBody: @@ -2831,6 +3003,92 @@ paths: type: "string" in: "path" required: true + /sources#Apptivo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceApptivoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceApptivo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Apptivo#create + /sources/{sourceId}#Apptivo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceApptivo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Apptivo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceApptivoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceApptivo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Apptivo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceApptivo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Apptivo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Asana: post: requestBody: @@ -3605,6 +3863,92 @@ paths: type: "string" in: "path" required: true + /sources#Bigmailer: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBigmailerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBigmailer" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bigmailer#create + /sources/{sourceId}#Bigmailer: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBigmailer" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bigmailer#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBigmailerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBigmailer" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bigmailer#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBigmailer" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bigmailer#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Bigquery: post: requestBody: @@ -3863,6 +4207,178 @@ paths: type: "string" in: "path" required: true + /sources#Blogger: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBloggerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBlogger" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Blogger#create + /sources/{sourceId}#Blogger: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBlogger" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Blogger#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBloggerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBlogger" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Blogger#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBlogger" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Blogger#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Box: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBoxCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBox" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Box#create + /sources/{sourceId}#Box: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBox" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Box#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBoxPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBox" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Box#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBox" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Box#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Braintree: post: requestBody: @@ -4207,6 +4723,92 @@ paths: type: "string" in: "path" required: true + /sources#Brex: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBrexCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBrex" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Brex#create + /sources/{sourceId}#Brex: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBrex" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Brex#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBrexPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBrex" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Brex#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBrex" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Brex#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Bugsnag: post: requestBody: @@ -4379,6 +4981,92 @@ paths: type: "string" in: "path" required: true + /sources#BunnyInc: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBunnyIncCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBunnyInc" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BunnyInc#create + /sources/{sourceId}#BunnyInc: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBunnyInc" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BunnyInc#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBunnyIncPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBunnyInc" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BunnyInc#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBunnyInc" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BunnyInc#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Buzzsprout: post: requestBody: @@ -4465,6 +5153,92 @@ paths: type: "string" in: "path" required: true + /sources#CalCom: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCalComCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCalCom" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CalCom#create + /sources/{sourceId}#CalCom: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCalCom" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CalCom#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCalComPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCalCom" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CalCom#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCalCom" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CalCom#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Calendly: post: requestBody: @@ -4723,6 +5497,92 @@ paths: type: "string" in: "path" required: true + /sources#Campayn: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCampaynCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCampayn" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Campayn#create + /sources/{sourceId}#Campayn: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCampayn" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Campayn#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCampaynPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCampayn" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Campayn#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCampayn" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Campayn#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Canny: post: requestBody: @@ -4809,6 +5669,92 @@ paths: type: "string" in: "path" required: true + /sources#CapsuleCrm: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCapsuleCrmCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCapsuleCrm" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CapsuleCrm#create + /sources/{sourceId}#CapsuleCrm: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCapsuleCrm" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CapsuleCrm#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCapsuleCrmPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCapsuleCrm" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CapsuleCrm#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCapsuleCrm" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CapsuleCrm#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#CareQualityCommission: post: requestBody: @@ -5583,6 +6529,92 @@ paths: type: "string" in: "path" required: true + /sources#Cin7: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCin7CreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCin7" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cin7#create + /sources/{sourceId}#Cin7: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCin7" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cin7#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCin7PutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCin7" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cin7#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCin7" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cin7#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Circa: post: requestBody: @@ -6185,6 +7217,92 @@ paths: type: "string" in: "path" required: true + /sources#Clockodo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClockodoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceClockodo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clockodo#create + /sources/{sourceId}#Clockodo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceClockodo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clockodo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClockodoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceClockodo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clockodo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceClockodo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clockodo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#CloseCom: post: requestBody: @@ -6271,6 +7389,92 @@ paths: type: "string" in: "path" required: true + /sources#Cloudbeds: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCloudbedsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCloudbeds" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cloudbeds#create + /sources/{sourceId}#Cloudbeds: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCloudbeds" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cloudbeds#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCloudbedsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCloudbeds" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cloudbeds#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCloudbeds" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cloudbeds#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Coassemble: post: requestBody: @@ -7733,6 +8937,92 @@ paths: type: "string" in: "path" required: true + /sources#Deputy: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDeputyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDeputy" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Deputy#create + /sources/{sourceId}#Deputy: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDeputy" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Deputy#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDeputyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDeputy" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Deputy#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDeputy" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Deputy#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Dixa: post: requestBody: @@ -8335,6 +9625,92 @@ paths: type: "string" in: "path" required: true + /sources#EConomic: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEConomicCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceEConomic" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_EConomic#create + /sources/{sourceId}#EConomic: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceEConomic" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_EConomic#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEConomicPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceEConomic" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_EConomic#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceEConomic" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_EConomic#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Easypost: post: requestBody: @@ -8507,6 +9883,92 @@ paths: type: "string" in: "path" required: true + /sources#Elasticemail: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceElasticemailCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceElasticemail" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Elasticemail#create + /sources/{sourceId}#Elasticemail: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceElasticemail" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Elasticemail#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceElasticemailPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceElasticemail" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Elasticemail#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceElasticemail" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Elasticemail#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Emailoctopus: post: requestBody: @@ -8679,6 +10141,92 @@ paths: type: "string" in: "path" required: true + /sources#Encharge: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEnchargeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceEncharge" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Encharge#create + /sources/{sourceId}#Encharge: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceEncharge" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Encharge#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEnchargePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceEncharge" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Encharge#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceEncharge" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Encharge#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Eventbrite: post: requestBody: @@ -8765,6 +10313,178 @@ paths: type: "string" in: "path" required: true + /sources#Eventee: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEventeeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceEventee" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventee#create + /sources/{sourceId}#Eventee: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceEventee" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventee#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEventeePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceEventee" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventee#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceEventee" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventee#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Eventzilla: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEventzillaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceEventzilla" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventzilla#create + /sources/{sourceId}#Eventzilla: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceEventzilla" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventzilla#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEventzillaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceEventzilla" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventzilla#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceEventzilla" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventzilla#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#ExchangeRates: post: requestBody: @@ -9367,13 +11087,13 @@ paths: type: "string" in: "path" required: true - /sources#Firebolt: + /sources#Fillout: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFireboltCreateRequest" + $ref: "#/components/schemas/SourceFilloutCreateRequest" tags: - "Sources" responses: @@ -9387,14 +11107,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFirebolt" + operationId: "createSourceFillout" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Firebolt#create - /sources/{sourceId}#Firebolt: + x-speakeasy-entity-operation: Source_Fillout#create + /sources/{sourceId}#Fillout: get: tags: - "Sources" @@ -9409,10 +11129,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFirebolt" + operationId: "getSourceFillout" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Firebolt#read + x-speakeasy-entity-operation: Source_Fillout#read put: tags: - "Sources" @@ -9420,7 +11140,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFireboltPutRequest" + $ref: "#/components/schemas/SourceFilloutPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -9428,10 +11148,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFirebolt" + operationId: "putSourceFillout" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Firebolt#update + x-speakeasy-entity-operation: Source_Fillout#update delete: tags: - "Sources" @@ -9442,10 +11162,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFirebolt" + operationId: "deleteSourceFillout" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Firebolt#delete + x-speakeasy-entity-operation: Source_Fillout#delete parameters: - name: "sourceId" schema: @@ -9453,13 +11173,13 @@ paths: type: "string" in: "path" required: true - /sources#Fleetio: + /sources#Finage: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFleetioCreateRequest" + $ref: "#/components/schemas/SourceFinageCreateRequest" tags: - "Sources" responses: @@ -9473,14 +11193,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFleetio" + operationId: "createSourceFinage" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Fleetio#create - /sources/{sourceId}#Fleetio: + x-speakeasy-entity-operation: Source_Finage#create + /sources/{sourceId}#Finage: get: tags: - "Sources" @@ -9495,10 +11215,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFleetio" + operationId: "getSourceFinage" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Fleetio#read + x-speakeasy-entity-operation: Source_Finage#read put: tags: - "Sources" @@ -9506,7 +11226,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFleetioPutRequest" + $ref: "#/components/schemas/SourceFinagePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -9514,10 +11234,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFleetio" + operationId: "putSourceFinage" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Fleetio#update + x-speakeasy-entity-operation: Source_Finage#update delete: tags: - "Sources" @@ -9528,10 +11248,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFleetio" + operationId: "deleteSourceFinage" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Fleetio#delete + x-speakeasy-entity-operation: Source_Finage#delete parameters: - name: "sourceId" schema: @@ -9539,13 +11259,13 @@ paths: type: "string" in: "path" required: true - /sources#Flexport: + /sources#FinancialModelling: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFlexportCreateRequest" + $ref: "#/components/schemas/SourceFinancialModellingCreateRequest" tags: - "Sources" responses: @@ -9559,14 +11279,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFlexport" + operationId: "createSourceFinancialModelling" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Flexport#create - /sources/{sourceId}#Flexport: + x-speakeasy-entity-operation: Source_FinancialModelling#create + /sources/{sourceId}#FinancialModelling: get: tags: - "Sources" @@ -9581,10 +11301,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFlexport" + operationId: "getSourceFinancialModelling" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Flexport#read + x-speakeasy-entity-operation: Source_FinancialModelling#read put: tags: - "Sources" @@ -9592,7 +11312,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFlexportPutRequest" + $ref: "#/components/schemas/SourceFinancialModellingPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -9600,10 +11320,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFlexport" + operationId: "putSourceFinancialModelling" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Flexport#update + x-speakeasy-entity-operation: Source_FinancialModelling#update delete: tags: - "Sources" @@ -9614,10 +11334,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFlexport" + operationId: "deleteSourceFinancialModelling" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Flexport#delete + x-speakeasy-entity-operation: Source_FinancialModelling#delete parameters: - name: "sourceId" schema: @@ -9625,13 +11345,13 @@ paths: type: "string" in: "path" required: true - /sources#Float: + /sources#Finnhub: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFloatCreateRequest" + $ref: "#/components/schemas/SourceFinnhubCreateRequest" tags: - "Sources" responses: @@ -9645,14 +11365,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFloat" + operationId: "createSourceFinnhub" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Float#create - /sources/{sourceId}#Float: + x-speakeasy-entity-operation: Source_Finnhub#create + /sources/{sourceId}#Finnhub: get: tags: - "Sources" @@ -9667,10 +11387,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFloat" + operationId: "getSourceFinnhub" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Float#read + x-speakeasy-entity-operation: Source_Finnhub#read put: tags: - "Sources" @@ -9678,7 +11398,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFloatPutRequest" + $ref: "#/components/schemas/SourceFinnhubPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -9686,10 +11406,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFloat" + operationId: "putSourceFinnhub" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Float#update + x-speakeasy-entity-operation: Source_Finnhub#update delete: tags: - "Sources" @@ -9700,10 +11420,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFloat" + operationId: "deleteSourceFinnhub" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Float#delete + x-speakeasy-entity-operation: Source_Finnhub#delete parameters: - name: "sourceId" schema: @@ -9711,13 +11431,13 @@ paths: type: "string" in: "path" required: true - /sources#FreeAgentConnector: + /sources#Finnworlds: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFreeAgentConnectorCreateRequest" + $ref: "#/components/schemas/SourceFinnworldsCreateRequest" tags: - "Sources" responses: @@ -9731,14 +11451,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFreeAgentConnector" + operationId: "createSourceFinnworlds" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_FreeAgentConnector#create - /sources/{sourceId}#FreeAgentConnector: + x-speakeasy-entity-operation: Source_Finnworlds#create + /sources/{sourceId}#Finnworlds: get: tags: - "Sources" @@ -9753,10 +11473,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFreeAgentConnector" + operationId: "getSourceFinnworlds" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_FreeAgentConnector#read + x-speakeasy-entity-operation: Source_Finnworlds#read put: tags: - "Sources" @@ -9764,7 +11484,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFreeAgentConnectorPutRequest" + $ref: "#/components/schemas/SourceFinnworldsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -9772,10 +11492,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFreeAgentConnector" + operationId: "putSourceFinnworlds" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_FreeAgentConnector#update + x-speakeasy-entity-operation: Source_Finnworlds#update delete: tags: - "Sources" @@ -9786,10 +11506,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFreeAgentConnector" + operationId: "deleteSourceFinnworlds" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_FreeAgentConnector#delete + x-speakeasy-entity-operation: Source_Finnworlds#delete parameters: - name: "sourceId" schema: @@ -9797,13 +11517,13 @@ paths: type: "string" in: "path" required: true - /sources#Freshcaller: + /sources#Firebolt: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshcallerCreateRequest" + $ref: "#/components/schemas/SourceFireboltCreateRequest" tags: - "Sources" responses: @@ -9817,14 +11537,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFreshcaller" + operationId: "createSourceFirebolt" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshcaller#create - /sources/{sourceId}#Freshcaller: + x-speakeasy-entity-operation: Source_Firebolt#create + /sources/{sourceId}#Firebolt: get: tags: - "Sources" @@ -9839,10 +11559,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFreshcaller" + operationId: "getSourceFirebolt" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshcaller#read + x-speakeasy-entity-operation: Source_Firebolt#read put: tags: - "Sources" @@ -9850,7 +11570,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshcallerPutRequest" + $ref: "#/components/schemas/SourceFireboltPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -9858,10 +11578,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFreshcaller" + operationId: "putSourceFirebolt" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshcaller#update + x-speakeasy-entity-operation: Source_Firebolt#update delete: tags: - "Sources" @@ -9872,10 +11592,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFreshcaller" + operationId: "deleteSourceFirebolt" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshcaller#delete + x-speakeasy-entity-operation: Source_Firebolt#delete parameters: - name: "sourceId" schema: @@ -9883,13 +11603,13 @@ paths: type: "string" in: "path" required: true - /sources#Freshchat: + /sources#Firehydrant: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshchatCreateRequest" + $ref: "#/components/schemas/SourceFirehydrantCreateRequest" tags: - "Sources" responses: @@ -9903,14 +11623,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFreshchat" + operationId: "createSourceFirehydrant" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshchat#create - /sources/{sourceId}#Freshchat: + x-speakeasy-entity-operation: Source_Firehydrant#create + /sources/{sourceId}#Firehydrant: get: tags: - "Sources" @@ -9925,10 +11645,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFreshchat" + operationId: "getSourceFirehydrant" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshchat#read + x-speakeasy-entity-operation: Source_Firehydrant#read put: tags: - "Sources" @@ -9936,7 +11656,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshchatPutRequest" + $ref: "#/components/schemas/SourceFirehydrantPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -9944,10 +11664,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFreshchat" + operationId: "putSourceFirehydrant" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshchat#update + x-speakeasy-entity-operation: Source_Firehydrant#update delete: tags: - "Sources" @@ -9958,10 +11678,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFreshchat" + operationId: "deleteSourceFirehydrant" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshchat#delete + x-speakeasy-entity-operation: Source_Firehydrant#delete parameters: - name: "sourceId" schema: @@ -9969,13 +11689,13 @@ paths: type: "string" in: "path" required: true - /sources#Freshdesk: + /sources#Fleetio: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshdeskCreateRequest" + $ref: "#/components/schemas/SourceFleetioCreateRequest" tags: - "Sources" responses: @@ -9989,14 +11709,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFreshdesk" + operationId: "createSourceFleetio" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshdesk#create - /sources/{sourceId}#Freshdesk: + x-speakeasy-entity-operation: Source_Fleetio#create + /sources/{sourceId}#Fleetio: get: tags: - "Sources" @@ -10011,10 +11731,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFreshdesk" + operationId: "getSourceFleetio" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshdesk#read + x-speakeasy-entity-operation: Source_Fleetio#read put: tags: - "Sources" @@ -10022,7 +11742,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshdeskPutRequest" + $ref: "#/components/schemas/SourceFleetioPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10030,10 +11750,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFreshdesk" + operationId: "putSourceFleetio" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshdesk#update + x-speakeasy-entity-operation: Source_Fleetio#update delete: tags: - "Sources" @@ -10044,10 +11764,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFreshdesk" + operationId: "deleteSourceFleetio" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshdesk#delete + x-speakeasy-entity-operation: Source_Fleetio#delete parameters: - name: "sourceId" schema: @@ -10055,13 +11775,13 @@ paths: type: "string" in: "path" required: true - /sources#Freshsales: + /sources#Flexmail: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshsalesCreateRequest" + $ref: "#/components/schemas/SourceFlexmailCreateRequest" tags: - "Sources" responses: @@ -10075,14 +11795,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFreshsales" + operationId: "createSourceFlexmail" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshsales#create - /sources/{sourceId}#Freshsales: + x-speakeasy-entity-operation: Source_Flexmail#create + /sources/{sourceId}#Flexmail: get: tags: - "Sources" @@ -10097,10 +11817,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFreshsales" + operationId: "getSourceFlexmail" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshsales#read + x-speakeasy-entity-operation: Source_Flexmail#read put: tags: - "Sources" @@ -10108,7 +11828,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshsalesPutRequest" + $ref: "#/components/schemas/SourceFlexmailPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10116,10 +11836,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFreshsales" + operationId: "putSourceFlexmail" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshsales#update + x-speakeasy-entity-operation: Source_Flexmail#update delete: tags: - "Sources" @@ -10130,10 +11850,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFreshsales" + operationId: "deleteSourceFlexmail" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshsales#delete + x-speakeasy-entity-operation: Source_Flexmail#delete parameters: - name: "sourceId" schema: @@ -10141,13 +11861,13 @@ paths: type: "string" in: "path" required: true - /sources#Freshservice: + /sources#Flexport: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshserviceCreateRequest" + $ref: "#/components/schemas/SourceFlexportCreateRequest" tags: - "Sources" responses: @@ -10161,14 +11881,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFreshservice" + operationId: "createSourceFlexport" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshservice#create - /sources/{sourceId}#Freshservice: + x-speakeasy-entity-operation: Source_Flexport#create + /sources/{sourceId}#Flexport: get: tags: - "Sources" @@ -10183,10 +11903,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFreshservice" + operationId: "getSourceFlexport" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshservice#read + x-speakeasy-entity-operation: Source_Flexport#read put: tags: - "Sources" @@ -10194,7 +11914,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFreshservicePutRequest" + $ref: "#/components/schemas/SourceFlexportPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10202,10 +11922,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFreshservice" + operationId: "putSourceFlexport" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshservice#update + x-speakeasy-entity-operation: Source_Flexport#update delete: tags: - "Sources" @@ -10216,10 +11936,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFreshservice" + operationId: "deleteSourceFlexport" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Freshservice#delete + x-speakeasy-entity-operation: Source_Flexport#delete parameters: - name: "sourceId" schema: @@ -10227,13 +11947,13 @@ paths: type: "string" in: "path" required: true - /sources#Front: + /sources#Float: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceFrontCreateRequest" + $ref: "#/components/schemas/SourceFloatCreateRequest" tags: - "Sources" responses: @@ -10247,14 +11967,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceFront" + operationId: "createSourceFloat" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Front#create - /sources/{sourceId}#Front: + x-speakeasy-entity-operation: Source_Float#create + /sources/{sourceId}#Float: get: tags: - "Sources" @@ -10269,10 +11989,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceFront" + operationId: "getSourceFloat" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Front#read + x-speakeasy-entity-operation: Source_Float#read put: tags: - "Sources" @@ -10280,7 +12000,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceFrontPutRequest" + $ref: "#/components/schemas/SourceFloatPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10288,10 +12008,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceFront" + operationId: "putSourceFloat" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Front#update + x-speakeasy-entity-operation: Source_Float#update delete: tags: - "Sources" @@ -10302,10 +12022,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceFront" + operationId: "deleteSourceFloat" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Front#delete + x-speakeasy-entity-operation: Source_Float#delete parameters: - name: "sourceId" schema: @@ -10313,13 +12033,13 @@ paths: type: "string" in: "path" required: true - /sources#GainsightPx: + /sources#Flowlu: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGainsightPxCreateRequest" + $ref: "#/components/schemas/SourceFlowluCreateRequest" tags: - "Sources" responses: @@ -10333,14 +12053,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGainsightPx" + operationId: "createSourceFlowlu" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GainsightPx#create - /sources/{sourceId}#GainsightPx: + x-speakeasy-entity-operation: Source_Flowlu#create + /sources/{sourceId}#Flowlu: get: tags: - "Sources" @@ -10355,10 +12075,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGainsightPx" + operationId: "getSourceFlowlu" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GainsightPx#read + x-speakeasy-entity-operation: Source_Flowlu#read put: tags: - "Sources" @@ -10366,7 +12086,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGainsightPxPutRequest" + $ref: "#/components/schemas/SourceFlowluPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10374,10 +12094,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGainsightPx" + operationId: "putSourceFlowlu" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GainsightPx#update + x-speakeasy-entity-operation: Source_Flowlu#update delete: tags: - "Sources" @@ -10388,10 +12108,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGainsightPx" + operationId: "deleteSourceFlowlu" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GainsightPx#delete + x-speakeasy-entity-operation: Source_Flowlu#delete parameters: - name: "sourceId" schema: @@ -10399,13 +12119,13 @@ paths: type: "string" in: "path" required: true - /sources#Gcs: + /sources#Formbricks: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGcsCreateRequest" + $ref: "#/components/schemas/SourceFormbricksCreateRequest" tags: - "Sources" responses: @@ -10419,14 +12139,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGcs" + operationId: "createSourceFormbricks" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gcs#create - /sources/{sourceId}#Gcs: + x-speakeasy-entity-operation: Source_Formbricks#create + /sources/{sourceId}#Formbricks: get: tags: - "Sources" @@ -10441,10 +12161,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGcs" + operationId: "getSourceFormbricks" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gcs#read + x-speakeasy-entity-operation: Source_Formbricks#read put: tags: - "Sources" @@ -10452,7 +12172,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGcsPutRequest" + $ref: "#/components/schemas/SourceFormbricksPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10460,10 +12180,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGcs" + operationId: "putSourceFormbricks" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gcs#update + x-speakeasy-entity-operation: Source_Formbricks#update delete: tags: - "Sources" @@ -10474,10 +12194,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGcs" + operationId: "deleteSourceFormbricks" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gcs#delete + x-speakeasy-entity-operation: Source_Formbricks#delete parameters: - name: "sourceId" schema: @@ -10485,13 +12205,13 @@ paths: type: "string" in: "path" required: true - /sources#Getlago: + /sources#FreeAgentConnector: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGetlagoCreateRequest" + $ref: "#/components/schemas/SourceFreeAgentConnectorCreateRequest" tags: - "Sources" responses: @@ -10505,14 +12225,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGetlago" + operationId: "createSourceFreeAgentConnector" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Getlago#create - /sources/{sourceId}#Getlago: + x-speakeasy-entity-operation: Source_FreeAgentConnector#create + /sources/{sourceId}#FreeAgentConnector: get: tags: - "Sources" @@ -10527,10 +12247,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGetlago" + operationId: "getSourceFreeAgentConnector" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Getlago#read + x-speakeasy-entity-operation: Source_FreeAgentConnector#read put: tags: - "Sources" @@ -10538,7 +12258,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGetlagoPutRequest" + $ref: "#/components/schemas/SourceFreeAgentConnectorPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10546,10 +12266,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGetlago" + operationId: "putSourceFreeAgentConnector" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Getlago#update + x-speakeasy-entity-operation: Source_FreeAgentConnector#update delete: tags: - "Sources" @@ -10560,10 +12280,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGetlago" + operationId: "deleteSourceFreeAgentConnector" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Getlago#delete + x-speakeasy-entity-operation: Source_FreeAgentConnector#delete parameters: - name: "sourceId" schema: @@ -10571,13 +12291,13 @@ paths: type: "string" in: "path" required: true - /sources#Github: + /sources#Freightview: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGithubCreateRequest" + $ref: "#/components/schemas/SourceFreightviewCreateRequest" tags: - "Sources" responses: @@ -10591,14 +12311,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGithub" + operationId: "createSourceFreightview" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Github#create - /sources/{sourceId}#Github: + x-speakeasy-entity-operation: Source_Freightview#create + /sources/{sourceId}#Freightview: get: tags: - "Sources" @@ -10613,10 +12333,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGithub" + operationId: "getSourceFreightview" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Github#read + x-speakeasy-entity-operation: Source_Freightview#read put: tags: - "Sources" @@ -10624,7 +12344,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGithubPutRequest" + $ref: "#/components/schemas/SourceFreightviewPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10632,10 +12352,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGithub" + operationId: "putSourceFreightview" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Github#update + x-speakeasy-entity-operation: Source_Freightview#update delete: tags: - "Sources" @@ -10646,10 +12366,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGithub" + operationId: "deleteSourceFreightview" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Github#delete + x-speakeasy-entity-operation: Source_Freightview#delete parameters: - name: "sourceId" schema: @@ -10657,13 +12377,13 @@ paths: type: "string" in: "path" required: true - /sources#Gitlab: + /sources#Freshbooks: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGitlabCreateRequest" + $ref: "#/components/schemas/SourceFreshbooksCreateRequest" tags: - "Sources" responses: @@ -10677,14 +12397,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGitlab" + operationId: "createSourceFreshbooks" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gitlab#create - /sources/{sourceId}#Gitlab: + x-speakeasy-entity-operation: Source_Freshbooks#create + /sources/{sourceId}#Freshbooks: get: tags: - "Sources" @@ -10699,10 +12419,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGitlab" + operationId: "getSourceFreshbooks" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gitlab#read + x-speakeasy-entity-operation: Source_Freshbooks#read put: tags: - "Sources" @@ -10710,7 +12430,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGitlabPutRequest" + $ref: "#/components/schemas/SourceFreshbooksPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10718,10 +12438,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGitlab" + operationId: "putSourceFreshbooks" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gitlab#update + x-speakeasy-entity-operation: Source_Freshbooks#update delete: tags: - "Sources" @@ -10732,10 +12452,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGitlab" + operationId: "deleteSourceFreshbooks" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gitlab#delete + x-speakeasy-entity-operation: Source_Freshbooks#delete parameters: - name: "sourceId" schema: @@ -10743,13 +12463,13 @@ paths: type: "string" in: "path" required: true - /sources#Glassfrog: + /sources#Freshcaller: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGlassfrogCreateRequest" + $ref: "#/components/schemas/SourceFreshcallerCreateRequest" tags: - "Sources" responses: @@ -10763,14 +12483,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGlassfrog" + operationId: "createSourceFreshcaller" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Glassfrog#create - /sources/{sourceId}#Glassfrog: + x-speakeasy-entity-operation: Source_Freshcaller#create + /sources/{sourceId}#Freshcaller: get: tags: - "Sources" @@ -10785,10 +12505,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGlassfrog" + operationId: "getSourceFreshcaller" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Glassfrog#read + x-speakeasy-entity-operation: Source_Freshcaller#read put: tags: - "Sources" @@ -10796,7 +12516,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGlassfrogPutRequest" + $ref: "#/components/schemas/SourceFreshcallerPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10804,10 +12524,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGlassfrog" + operationId: "putSourceFreshcaller" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Glassfrog#update + x-speakeasy-entity-operation: Source_Freshcaller#update delete: tags: - "Sources" @@ -10818,10 +12538,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGlassfrog" + operationId: "deleteSourceFreshcaller" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Glassfrog#delete + x-speakeasy-entity-operation: Source_Freshcaller#delete parameters: - name: "sourceId" schema: @@ -10829,13 +12549,13 @@ paths: type: "string" in: "path" required: true - /sources#Gmail: + /sources#Freshchat: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGmailCreateRequest" + $ref: "#/components/schemas/SourceFreshchatCreateRequest" tags: - "Sources" responses: @@ -10849,14 +12569,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGmail" + operationId: "createSourceFreshchat" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gmail#create - /sources/{sourceId}#Gmail: + x-speakeasy-entity-operation: Source_Freshchat#create + /sources/{sourceId}#Freshchat: get: tags: - "Sources" @@ -10871,10 +12591,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGmail" + operationId: "getSourceFreshchat" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gmail#read + x-speakeasy-entity-operation: Source_Freshchat#read put: tags: - "Sources" @@ -10882,7 +12602,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGmailPutRequest" + $ref: "#/components/schemas/SourceFreshchatPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10890,10 +12610,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGmail" + operationId: "putSourceFreshchat" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gmail#update + x-speakeasy-entity-operation: Source_Freshchat#update delete: tags: - "Sources" @@ -10904,10 +12624,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGmail" + operationId: "deleteSourceFreshchat" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gmail#delete + x-speakeasy-entity-operation: Source_Freshchat#delete parameters: - name: "sourceId" schema: @@ -10915,13 +12635,13 @@ paths: type: "string" in: "path" required: true - /sources#Gnews: + /sources#Freshdesk: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGnewsCreateRequest" + $ref: "#/components/schemas/SourceFreshdeskCreateRequest" tags: - "Sources" responses: @@ -10935,14 +12655,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGnews" + operationId: "createSourceFreshdesk" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gnews#create - /sources/{sourceId}#Gnews: + x-speakeasy-entity-operation: Source_Freshdesk#create + /sources/{sourceId}#Freshdesk: get: tags: - "Sources" @@ -10957,10 +12677,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGnews" + operationId: "getSourceFreshdesk" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gnews#read + x-speakeasy-entity-operation: Source_Freshdesk#read put: tags: - "Sources" @@ -10968,7 +12688,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGnewsPutRequest" + $ref: "#/components/schemas/SourceFreshdeskPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -10976,10 +12696,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGnews" + operationId: "putSourceFreshdesk" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gnews#update + x-speakeasy-entity-operation: Source_Freshdesk#update delete: tags: - "Sources" @@ -10990,10 +12710,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGnews" + operationId: "deleteSourceFreshdesk" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gnews#delete + x-speakeasy-entity-operation: Source_Freshdesk#delete parameters: - name: "sourceId" schema: @@ -11001,13 +12721,13 @@ paths: type: "string" in: "path" required: true - /sources#Gocardless: + /sources#Freshsales: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGocardlessCreateRequest" + $ref: "#/components/schemas/SourceFreshsalesCreateRequest" tags: - "Sources" responses: @@ -11021,14 +12741,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGocardless" + operationId: "createSourceFreshsales" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gocardless#create - /sources/{sourceId}#Gocardless: + x-speakeasy-entity-operation: Source_Freshsales#create + /sources/{sourceId}#Freshsales: get: tags: - "Sources" @@ -11043,10 +12763,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGocardless" + operationId: "getSourceFreshsales" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gocardless#read + x-speakeasy-entity-operation: Source_Freshsales#read put: tags: - "Sources" @@ -11054,7 +12774,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGocardlessPutRequest" + $ref: "#/components/schemas/SourceFreshsalesPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11062,10 +12782,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGocardless" + operationId: "putSourceFreshsales" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gocardless#update + x-speakeasy-entity-operation: Source_Freshsales#update delete: tags: - "Sources" @@ -11076,10 +12796,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGocardless" + operationId: "deleteSourceFreshsales" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gocardless#delete + x-speakeasy-entity-operation: Source_Freshsales#delete parameters: - name: "sourceId" schema: @@ -11087,13 +12807,13 @@ paths: type: "string" in: "path" required: true - /sources#Goldcast: + /sources#Freshservice: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoldcastCreateRequest" + $ref: "#/components/schemas/SourceFreshserviceCreateRequest" tags: - "Sources" responses: @@ -11107,14 +12827,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoldcast" + operationId: "createSourceFreshservice" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Goldcast#create - /sources/{sourceId}#Goldcast: + x-speakeasy-entity-operation: Source_Freshservice#create + /sources/{sourceId}#Freshservice: get: tags: - "Sources" @@ -11129,10 +12849,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoldcast" + operationId: "getSourceFreshservice" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Goldcast#read + x-speakeasy-entity-operation: Source_Freshservice#read put: tags: - "Sources" @@ -11140,7 +12860,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoldcastPutRequest" + $ref: "#/components/schemas/SourceFreshservicePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11148,10 +12868,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoldcast" + operationId: "putSourceFreshservice" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Goldcast#update + x-speakeasy-entity-operation: Source_Freshservice#update delete: tags: - "Sources" @@ -11162,10 +12882,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoldcast" + operationId: "deleteSourceFreshservice" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Goldcast#delete + x-speakeasy-entity-operation: Source_Freshservice#delete parameters: - name: "sourceId" schema: @@ -11173,13 +12893,13 @@ paths: type: "string" in: "path" required: true - /sources#Gong: + /sources#Front: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGongCreateRequest" + $ref: "#/components/schemas/SourceFrontCreateRequest" tags: - "Sources" responses: @@ -11193,14 +12913,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGong" + operationId: "createSourceFront" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gong#create - /sources/{sourceId}#Gong: + x-speakeasy-entity-operation: Source_Front#create + /sources/{sourceId}#Front: get: tags: - "Sources" @@ -11215,10 +12935,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGong" + operationId: "getSourceFront" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gong#read + x-speakeasy-entity-operation: Source_Front#read put: tags: - "Sources" @@ -11226,7 +12946,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGongPutRequest" + $ref: "#/components/schemas/SourceFrontPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11234,10 +12954,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGong" + operationId: "putSourceFront" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gong#update + x-speakeasy-entity-operation: Source_Front#update delete: tags: - "Sources" @@ -11248,10 +12968,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGong" + operationId: "deleteSourceFront" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gong#delete + x-speakeasy-entity-operation: Source_Front#delete parameters: - name: "sourceId" schema: @@ -11259,13 +12979,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleAds: + /sources#Fulcrum: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleAdsCreateRequest" + $ref: "#/components/schemas/SourceFulcrumCreateRequest" tags: - "Sources" responses: @@ -11279,14 +12999,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleAds" + operationId: "createSourceFulcrum" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleAds#create - /sources/{sourceId}#GoogleAds: + x-speakeasy-entity-operation: Source_Fulcrum#create + /sources/{sourceId}#Fulcrum: get: tags: - "Sources" @@ -11301,10 +13021,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleAds" + operationId: "getSourceFulcrum" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleAds#read + x-speakeasy-entity-operation: Source_Fulcrum#read put: tags: - "Sources" @@ -11312,7 +13032,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleAdsPutRequest" + $ref: "#/components/schemas/SourceFulcrumPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11320,10 +13040,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleAds" + operationId: "putSourceFulcrum" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleAds#update + x-speakeasy-entity-operation: Source_Fulcrum#update delete: tags: - "Sources" @@ -11334,10 +13054,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleAds" + operationId: "deleteSourceFulcrum" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleAds#delete + x-speakeasy-entity-operation: Source_Fulcrum#delete parameters: - name: "sourceId" schema: @@ -11345,13 +13065,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleAnalyticsDataApi: + /sources#GainsightPx: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleAnalyticsDataApiCreateRequest" + $ref: "#/components/schemas/SourceGainsightPxCreateRequest" tags: - "Sources" responses: @@ -11365,14 +13085,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleAnalyticsDataApi" + operationId: "createSourceGainsightPx" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#create - /sources/{sourceId}#GoogleAnalyticsDataApi: + x-speakeasy-entity-operation: Source_GainsightPx#create + /sources/{sourceId}#GainsightPx: get: tags: - "Sources" @@ -11387,10 +13107,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleAnalyticsDataApi" + operationId: "getSourceGainsightPx" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#read + x-speakeasy-entity-operation: Source_GainsightPx#read put: tags: - "Sources" @@ -11398,7 +13118,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleAnalyticsDataApiPutRequest" + $ref: "#/components/schemas/SourceGainsightPxPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11406,10 +13126,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleAnalyticsDataApi" + operationId: "putSourceGainsightPx" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#update + x-speakeasy-entity-operation: Source_GainsightPx#update delete: tags: - "Sources" @@ -11420,10 +13140,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleAnalyticsDataApi" + operationId: "deleteSourceGainsightPx" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#delete + x-speakeasy-entity-operation: Source_GainsightPx#delete parameters: - name: "sourceId" schema: @@ -11431,13 +13151,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleCalendar: + /sources#Gcs: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleCalendarCreateRequest" + $ref: "#/components/schemas/SourceGcsCreateRequest" tags: - "Sources" responses: @@ -11451,14 +13171,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleCalendar" + operationId: "createSourceGcs" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleCalendar#create - /sources/{sourceId}#GoogleCalendar: + x-speakeasy-entity-operation: Source_Gcs#create + /sources/{sourceId}#Gcs: get: tags: - "Sources" @@ -11473,10 +13193,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleCalendar" + operationId: "getSourceGcs" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleCalendar#read + x-speakeasy-entity-operation: Source_Gcs#read put: tags: - "Sources" @@ -11484,7 +13204,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleCalendarPutRequest" + $ref: "#/components/schemas/SourceGcsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11492,10 +13212,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleCalendar" + operationId: "putSourceGcs" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleCalendar#update + x-speakeasy-entity-operation: Source_Gcs#update delete: tags: - "Sources" @@ -11506,10 +13226,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleCalendar" + operationId: "deleteSourceGcs" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleCalendar#delete + x-speakeasy-entity-operation: Source_Gcs#delete parameters: - name: "sourceId" schema: @@ -11517,13 +13237,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleDirectory: + /sources#Getgist: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleDirectoryCreateRequest" + $ref: "#/components/schemas/SourceGetgistCreateRequest" tags: - "Sources" responses: @@ -11537,14 +13257,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleDirectory" + operationId: "createSourceGetgist" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleDirectory#create - /sources/{sourceId}#GoogleDirectory: + x-speakeasy-entity-operation: Source_Getgist#create + /sources/{sourceId}#Getgist: get: tags: - "Sources" @@ -11559,10 +13279,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleDirectory" + operationId: "getSourceGetgist" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleDirectory#read + x-speakeasy-entity-operation: Source_Getgist#read put: tags: - "Sources" @@ -11570,7 +13290,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleDirectoryPutRequest" + $ref: "#/components/schemas/SourceGetgistPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11578,10 +13298,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleDirectory" + operationId: "putSourceGetgist" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleDirectory#update + x-speakeasy-entity-operation: Source_Getgist#update delete: tags: - "Sources" @@ -11592,10 +13312,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleDirectory" + operationId: "deleteSourceGetgist" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleDirectory#delete + x-speakeasy-entity-operation: Source_Getgist#delete parameters: - name: "sourceId" schema: @@ -11603,13 +13323,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleDrive: + /sources#Getlago: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleDriveCreateRequest" + $ref: "#/components/schemas/SourceGetlagoCreateRequest" tags: - "Sources" responses: @@ -11623,14 +13343,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleDrive" + operationId: "createSourceGetlago" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleDrive#create - /sources/{sourceId}#GoogleDrive: + x-speakeasy-entity-operation: Source_Getlago#create + /sources/{sourceId}#Getlago: get: tags: - "Sources" @@ -11645,10 +13365,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleDrive" + operationId: "getSourceGetlago" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleDrive#read + x-speakeasy-entity-operation: Source_Getlago#read put: tags: - "Sources" @@ -11656,7 +13376,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleDrivePutRequest" + $ref: "#/components/schemas/SourceGetlagoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11664,10 +13384,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleDrive" + operationId: "putSourceGetlago" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleDrive#update + x-speakeasy-entity-operation: Source_Getlago#update delete: tags: - "Sources" @@ -11678,10 +13398,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleDrive" + operationId: "deleteSourceGetlago" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleDrive#delete + x-speakeasy-entity-operation: Source_Getlago#delete parameters: - name: "sourceId" schema: @@ -11689,13 +13409,13 @@ paths: type: "string" in: "path" required: true - /sources#GooglePagespeedInsights: + /sources#Gitbook: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGooglePagespeedInsightsCreateRequest" + $ref: "#/components/schemas/SourceGitbookCreateRequest" tags: - "Sources" responses: @@ -11709,14 +13429,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGooglePagespeedInsights" + operationId: "createSourceGitbook" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GooglePagespeedInsights#create - /sources/{sourceId}#GooglePagespeedInsights: + x-speakeasy-entity-operation: Source_Gitbook#create + /sources/{sourceId}#Gitbook: get: tags: - "Sources" @@ -11731,10 +13451,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGooglePagespeedInsights" + operationId: "getSourceGitbook" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GooglePagespeedInsights#read + x-speakeasy-entity-operation: Source_Gitbook#read put: tags: - "Sources" @@ -11742,7 +13462,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGooglePagespeedInsightsPutRequest" + $ref: "#/components/schemas/SourceGitbookPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11750,10 +13470,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGooglePagespeedInsights" + operationId: "putSourceGitbook" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GooglePagespeedInsights#update + x-speakeasy-entity-operation: Source_Gitbook#update delete: tags: - "Sources" @@ -11764,10 +13484,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGooglePagespeedInsights" + operationId: "deleteSourceGitbook" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GooglePagespeedInsights#delete + x-speakeasy-entity-operation: Source_Gitbook#delete parameters: - name: "sourceId" schema: @@ -11775,13 +13495,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleSearchConsole: + /sources#Github: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleSearchConsoleCreateRequest" + $ref: "#/components/schemas/SourceGithubCreateRequest" tags: - "Sources" responses: @@ -11795,14 +13515,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleSearchConsole" + operationId: "createSourceGithub" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleSearchConsole#create - /sources/{sourceId}#GoogleSearchConsole: + x-speakeasy-entity-operation: Source_Github#create + /sources/{sourceId}#Github: get: tags: - "Sources" @@ -11817,10 +13537,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleSearchConsole" + operationId: "getSourceGithub" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleSearchConsole#read + x-speakeasy-entity-operation: Source_Github#read put: tags: - "Sources" @@ -11828,7 +13548,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleSearchConsolePutRequest" + $ref: "#/components/schemas/SourceGithubPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11836,10 +13556,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleSearchConsole" + operationId: "putSourceGithub" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleSearchConsole#update + x-speakeasy-entity-operation: Source_Github#update delete: tags: - "Sources" @@ -11850,10 +13570,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleSearchConsole" + operationId: "deleteSourceGithub" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleSearchConsole#delete + x-speakeasy-entity-operation: Source_Github#delete parameters: - name: "sourceId" schema: @@ -11861,13 +13581,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleSheets: + /sources#Gitlab: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleSheetsCreateRequest" + $ref: "#/components/schemas/SourceGitlabCreateRequest" tags: - "Sources" responses: @@ -11881,14 +13601,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleSheets" + operationId: "createSourceGitlab" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleSheets#create - /sources/{sourceId}#GoogleSheets: + x-speakeasy-entity-operation: Source_Gitlab#create + /sources/{sourceId}#Gitlab: get: tags: - "Sources" @@ -11903,10 +13623,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleSheets" + operationId: "getSourceGitlab" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleSheets#read + x-speakeasy-entity-operation: Source_Gitlab#read put: tags: - "Sources" @@ -11914,7 +13634,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleSheetsPutRequest" + $ref: "#/components/schemas/SourceGitlabPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -11922,10 +13642,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleSheets" + operationId: "putSourceGitlab" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleSheets#update + x-speakeasy-entity-operation: Source_Gitlab#update delete: tags: - "Sources" @@ -11936,10 +13656,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleSheets" + operationId: "deleteSourceGitlab" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleSheets#delete + x-speakeasy-entity-operation: Source_Gitlab#delete parameters: - name: "sourceId" schema: @@ -11947,13 +13667,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleTasks: + /sources#Glassfrog: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleTasksCreateRequest" + $ref: "#/components/schemas/SourceGlassfrogCreateRequest" tags: - "Sources" responses: @@ -11967,14 +13687,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleTasks" + operationId: "createSourceGlassfrog" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleTasks#create - /sources/{sourceId}#GoogleTasks: + x-speakeasy-entity-operation: Source_Glassfrog#create + /sources/{sourceId}#Glassfrog: get: tags: - "Sources" @@ -11989,10 +13709,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleTasks" + operationId: "getSourceGlassfrog" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleTasks#read + x-speakeasy-entity-operation: Source_Glassfrog#read put: tags: - "Sources" @@ -12000,7 +13720,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleTasksPutRequest" + $ref: "#/components/schemas/SourceGlassfrogPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12008,10 +13728,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleTasks" + operationId: "putSourceGlassfrog" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleTasks#update + x-speakeasy-entity-operation: Source_Glassfrog#update delete: tags: - "Sources" @@ -12022,10 +13742,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleTasks" + operationId: "deleteSourceGlassfrog" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleTasks#delete + x-speakeasy-entity-operation: Source_Glassfrog#delete parameters: - name: "sourceId" schema: @@ -12033,13 +13753,13 @@ paths: type: "string" in: "path" required: true - /sources#GoogleWebfonts: + /sources#Gmail: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleWebfontsCreateRequest" + $ref: "#/components/schemas/SourceGmailCreateRequest" tags: - "Sources" responses: @@ -12053,14 +13773,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGoogleWebfonts" + operationId: "createSourceGmail" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleWebfonts#create - /sources/{sourceId}#GoogleWebfonts: + x-speakeasy-entity-operation: Source_Gmail#create + /sources/{sourceId}#Gmail: get: tags: - "Sources" @@ -12075,10 +13795,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGoogleWebfonts" + operationId: "getSourceGmail" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleWebfonts#read + x-speakeasy-entity-operation: Source_Gmail#read put: tags: - "Sources" @@ -12086,7 +13806,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGoogleWebfontsPutRequest" + $ref: "#/components/schemas/SourceGmailPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12094,10 +13814,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGoogleWebfonts" + operationId: "putSourceGmail" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleWebfonts#update + x-speakeasy-entity-operation: Source_Gmail#update delete: tags: - "Sources" @@ -12108,10 +13828,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGoogleWebfonts" + operationId: "deleteSourceGmail" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_GoogleWebfonts#delete + x-speakeasy-entity-operation: Source_Gmail#delete parameters: - name: "sourceId" schema: @@ -12119,13 +13839,13 @@ paths: type: "string" in: "path" required: true - /sources#Gorgias: + /sources#Gnews: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGorgiasCreateRequest" + $ref: "#/components/schemas/SourceGnewsCreateRequest" tags: - "Sources" responses: @@ -12139,14 +13859,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGorgias" + operationId: "createSourceGnews" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gorgias#create - /sources/{sourceId}#Gorgias: + x-speakeasy-entity-operation: Source_Gnews#create + /sources/{sourceId}#Gnews: get: tags: - "Sources" @@ -12161,10 +13881,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGorgias" + operationId: "getSourceGnews" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gorgias#read + x-speakeasy-entity-operation: Source_Gnews#read put: tags: - "Sources" @@ -12172,7 +13892,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGorgiasPutRequest" + $ref: "#/components/schemas/SourceGnewsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12180,10 +13900,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGorgias" + operationId: "putSourceGnews" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gorgias#update + x-speakeasy-entity-operation: Source_Gnews#update delete: tags: - "Sources" @@ -12194,10 +13914,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGorgias" + operationId: "deleteSourceGnews" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gorgias#delete + x-speakeasy-entity-operation: Source_Gnews#delete parameters: - name: "sourceId" schema: @@ -12205,13 +13925,13 @@ paths: type: "string" in: "path" required: true - /sources#Greenhouse: + /sources#Gocardless: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGreenhouseCreateRequest" + $ref: "#/components/schemas/SourceGocardlessCreateRequest" tags: - "Sources" responses: @@ -12225,14 +13945,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGreenhouse" + operationId: "createSourceGocardless" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Greenhouse#create - /sources/{sourceId}#Greenhouse: + x-speakeasy-entity-operation: Source_Gocardless#create + /sources/{sourceId}#Gocardless: get: tags: - "Sources" @@ -12247,10 +13967,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGreenhouse" + operationId: "getSourceGocardless" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Greenhouse#read + x-speakeasy-entity-operation: Source_Gocardless#read put: tags: - "Sources" @@ -12258,7 +13978,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGreenhousePutRequest" + $ref: "#/components/schemas/SourceGocardlessPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12266,10 +13986,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGreenhouse" + operationId: "putSourceGocardless" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Greenhouse#update + x-speakeasy-entity-operation: Source_Gocardless#update delete: tags: - "Sources" @@ -12280,10 +14000,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGreenhouse" + operationId: "deleteSourceGocardless" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Greenhouse#delete + x-speakeasy-entity-operation: Source_Gocardless#delete parameters: - name: "sourceId" schema: @@ -12291,13 +14011,13 @@ paths: type: "string" in: "path" required: true - /sources#Gridly: + /sources#Goldcast: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGridlyCreateRequest" + $ref: "#/components/schemas/SourceGoldcastCreateRequest" tags: - "Sources" responses: @@ -12311,14 +14031,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGridly" + operationId: "createSourceGoldcast" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gridly#create - /sources/{sourceId}#Gridly: + x-speakeasy-entity-operation: Source_Goldcast#create + /sources/{sourceId}#Goldcast: get: tags: - "Sources" @@ -12333,10 +14053,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGridly" + operationId: "getSourceGoldcast" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gridly#read + x-speakeasy-entity-operation: Source_Goldcast#read put: tags: - "Sources" @@ -12344,7 +14064,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGridlyPutRequest" + $ref: "#/components/schemas/SourceGoldcastPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12352,10 +14072,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGridly" + operationId: "putSourceGoldcast" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gridly#update + x-speakeasy-entity-operation: Source_Goldcast#update delete: tags: - "Sources" @@ -12366,10 +14086,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGridly" + operationId: "deleteSourceGoldcast" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gridly#delete + x-speakeasy-entity-operation: Source_Goldcast#delete parameters: - name: "sourceId" schema: @@ -12377,13 +14097,13 @@ paths: type: "string" in: "path" required: true - /sources#Guru: + /sources#Gong: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGuruCreateRequest" + $ref: "#/components/schemas/SourceGongCreateRequest" tags: - "Sources" responses: @@ -12397,14 +14117,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGuru" + operationId: "createSourceGong" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Guru#create - /sources/{sourceId}#Guru: + x-speakeasy-entity-operation: Source_Gong#create + /sources/{sourceId}#Gong: get: tags: - "Sources" @@ -12419,10 +14139,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGuru" + operationId: "getSourceGong" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Guru#read + x-speakeasy-entity-operation: Source_Gong#read put: tags: - "Sources" @@ -12430,7 +14150,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGuruPutRequest" + $ref: "#/components/schemas/SourceGongPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12438,10 +14158,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGuru" + operationId: "putSourceGong" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Guru#update + x-speakeasy-entity-operation: Source_Gong#update delete: tags: - "Sources" @@ -12452,10 +14172,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGuru" + operationId: "deleteSourceGong" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Guru#delete + x-speakeasy-entity-operation: Source_Gong#delete parameters: - name: "sourceId" schema: @@ -12463,13 +14183,13 @@ paths: type: "string" in: "path" required: true - /sources#Gutendex: + /sources#GoogleAds: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceGutendexCreateRequest" + $ref: "#/components/schemas/SourceGoogleAdsCreateRequest" tags: - "Sources" responses: @@ -12483,14 +14203,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceGutendex" + operationId: "createSourceGoogleAds" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gutendex#create - /sources/{sourceId}#Gutendex: + x-speakeasy-entity-operation: Source_GoogleAds#create + /sources/{sourceId}#GoogleAds: get: tags: - "Sources" @@ -12505,10 +14225,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceGutendex" + operationId: "getSourceGoogleAds" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gutendex#read + x-speakeasy-entity-operation: Source_GoogleAds#read put: tags: - "Sources" @@ -12516,7 +14236,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceGutendexPutRequest" + $ref: "#/components/schemas/SourceGoogleAdsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12524,10 +14244,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceGutendex" + operationId: "putSourceGoogleAds" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gutendex#update + x-speakeasy-entity-operation: Source_GoogleAds#update delete: tags: - "Sources" @@ -12538,10 +14258,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceGutendex" + operationId: "deleteSourceGoogleAds" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Gutendex#delete + x-speakeasy-entity-operation: Source_GoogleAds#delete parameters: - name: "sourceId" schema: @@ -12549,13 +14269,13 @@ paths: type: "string" in: "path" required: true - /sources#HardcodedRecords: + /sources#GoogleAnalyticsDataApi: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceHardcodedRecordsCreateRequest" + $ref: "#/components/schemas/SourceGoogleAnalyticsDataApiCreateRequest" tags: - "Sources" responses: @@ -12569,14 +14289,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceHardcodedRecords" + operationId: "createSourceGoogleAnalyticsDataApi" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_HardcodedRecords#create - /sources/{sourceId}#HardcodedRecords: + x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#create + /sources/{sourceId}#GoogleAnalyticsDataApi: get: tags: - "Sources" @@ -12591,10 +14311,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceHardcodedRecords" + operationId: "getSourceGoogleAnalyticsDataApi" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_HardcodedRecords#read + x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#read put: tags: - "Sources" @@ -12602,7 +14322,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceHardcodedRecordsPutRequest" + $ref: "#/components/schemas/SourceGoogleAnalyticsDataApiPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12610,10 +14330,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceHardcodedRecords" + operationId: "putSourceGoogleAnalyticsDataApi" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_HardcodedRecords#update + x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#update delete: tags: - "Sources" @@ -12624,10 +14344,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceHardcodedRecords" + operationId: "deleteSourceGoogleAnalyticsDataApi" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_HardcodedRecords#delete + x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#delete parameters: - name: "sourceId" schema: @@ -12635,13 +14355,13 @@ paths: type: "string" in: "path" required: true - /sources#Harvest: + /sources#GoogleCalendar: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceHarvestCreateRequest" + $ref: "#/components/schemas/SourceGoogleCalendarCreateRequest" tags: - "Sources" responses: @@ -12655,14 +14375,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceHarvest" + operationId: "createSourceGoogleCalendar" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Harvest#create - /sources/{sourceId}#Harvest: + x-speakeasy-entity-operation: Source_GoogleCalendar#create + /sources/{sourceId}#GoogleCalendar: get: tags: - "Sources" @@ -12677,10 +14397,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceHarvest" + operationId: "getSourceGoogleCalendar" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Harvest#read + x-speakeasy-entity-operation: Source_GoogleCalendar#read put: tags: - "Sources" @@ -12688,7 +14408,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceHarvestPutRequest" + $ref: "#/components/schemas/SourceGoogleCalendarPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12696,10 +14416,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceHarvest" + operationId: "putSourceGoogleCalendar" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Harvest#update + x-speakeasy-entity-operation: Source_GoogleCalendar#update delete: tags: - "Sources" @@ -12710,10 +14430,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceHarvest" + operationId: "deleteSourceGoogleCalendar" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Harvest#delete + x-speakeasy-entity-operation: Source_GoogleCalendar#delete parameters: - name: "sourceId" schema: @@ -12721,13 +14441,13 @@ paths: type: "string" in: "path" required: true - /sources#Height: + /sources#GoogleClassroom: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceHeightCreateRequest" + $ref: "#/components/schemas/SourceGoogleClassroomCreateRequest" tags: - "Sources" responses: @@ -12741,14 +14461,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceHeight" + operationId: "createSourceGoogleClassroom" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Height#create - /sources/{sourceId}#Height: + x-speakeasy-entity-operation: Source_GoogleClassroom#create + /sources/{sourceId}#GoogleClassroom: get: tags: - "Sources" @@ -12763,10 +14483,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceHeight" + operationId: "getSourceGoogleClassroom" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Height#read + x-speakeasy-entity-operation: Source_GoogleClassroom#read put: tags: - "Sources" @@ -12774,7 +14494,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceHeightPutRequest" + $ref: "#/components/schemas/SourceGoogleClassroomPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12782,10 +14502,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceHeight" + operationId: "putSourceGoogleClassroom" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Height#update + x-speakeasy-entity-operation: Source_GoogleClassroom#update delete: tags: - "Sources" @@ -12796,10 +14516,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceHeight" + operationId: "deleteSourceGoogleClassroom" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Height#delete + x-speakeasy-entity-operation: Source_GoogleClassroom#delete parameters: - name: "sourceId" schema: @@ -12807,13 +14527,13 @@ paths: type: "string" in: "path" required: true - /sources#Hibob: + /sources#GoogleDirectory: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceHibobCreateRequest" + $ref: "#/components/schemas/SourceGoogleDirectoryCreateRequest" tags: - "Sources" responses: @@ -12827,14 +14547,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceHibob" + operationId: "createSourceGoogleDirectory" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hibob#create - /sources/{sourceId}#Hibob: + x-speakeasy-entity-operation: Source_GoogleDirectory#create + /sources/{sourceId}#GoogleDirectory: get: tags: - "Sources" @@ -12849,10 +14569,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceHibob" + operationId: "getSourceGoogleDirectory" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hibob#read + x-speakeasy-entity-operation: Source_GoogleDirectory#read put: tags: - "Sources" @@ -12860,7 +14580,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceHibobPutRequest" + $ref: "#/components/schemas/SourceGoogleDirectoryPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12868,10 +14588,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceHibob" + operationId: "putSourceGoogleDirectory" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hibob#update + x-speakeasy-entity-operation: Source_GoogleDirectory#update delete: tags: - "Sources" @@ -12882,10 +14602,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceHibob" + operationId: "deleteSourceGoogleDirectory" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hibob#delete + x-speakeasy-entity-operation: Source_GoogleDirectory#delete parameters: - name: "sourceId" schema: @@ -12893,13 +14613,13 @@ paths: type: "string" in: "path" required: true - /sources#HighLevel: + /sources#GoogleDrive: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceHighLevelCreateRequest" + $ref: "#/components/schemas/SourceGoogleDriveCreateRequest" tags: - "Sources" responses: @@ -12913,14 +14633,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceHighLevel" + operationId: "createSourceGoogleDrive" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_HighLevel#create - /sources/{sourceId}#HighLevel: + x-speakeasy-entity-operation: Source_GoogleDrive#create + /sources/{sourceId}#GoogleDrive: get: tags: - "Sources" @@ -12935,10 +14655,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceHighLevel" + operationId: "getSourceGoogleDrive" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_HighLevel#read + x-speakeasy-entity-operation: Source_GoogleDrive#read put: tags: - "Sources" @@ -12946,7 +14666,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceHighLevelPutRequest" + $ref: "#/components/schemas/SourceGoogleDrivePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -12954,10 +14674,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceHighLevel" + operationId: "putSourceGoogleDrive" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_HighLevel#update + x-speakeasy-entity-operation: Source_GoogleDrive#update delete: tags: - "Sources" @@ -12968,10 +14688,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceHighLevel" + operationId: "deleteSourceGoogleDrive" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_HighLevel#delete + x-speakeasy-entity-operation: Source_GoogleDrive#delete parameters: - name: "sourceId" schema: @@ -12979,13 +14699,13 @@ paths: type: "string" in: "path" required: true - /sources#Hubplanner: + /sources#GoogleForms: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceHubplannerCreateRequest" + $ref: "#/components/schemas/SourceGoogleFormsCreateRequest" tags: - "Sources" responses: @@ -12999,14 +14719,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceHubplanner" + operationId: "createSourceGoogleForms" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hubplanner#create - /sources/{sourceId}#Hubplanner: + x-speakeasy-entity-operation: Source_GoogleForms#create + /sources/{sourceId}#GoogleForms: get: tags: - "Sources" @@ -13021,10 +14741,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceHubplanner" + operationId: "getSourceGoogleForms" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hubplanner#read + x-speakeasy-entity-operation: Source_GoogleForms#read put: tags: - "Sources" @@ -13032,7 +14752,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceHubplannerPutRequest" + $ref: "#/components/schemas/SourceGoogleFormsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13040,10 +14760,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceHubplanner" + operationId: "putSourceGoogleForms" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hubplanner#update + x-speakeasy-entity-operation: Source_GoogleForms#update delete: tags: - "Sources" @@ -13054,10 +14774,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceHubplanner" + operationId: "deleteSourceGoogleForms" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hubplanner#delete + x-speakeasy-entity-operation: Source_GoogleForms#delete parameters: - name: "sourceId" schema: @@ -13065,13 +14785,13 @@ paths: type: "string" in: "path" required: true - /sources#Hubspot: + /sources#GooglePagespeedInsights: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceHubspotCreateRequest" + $ref: "#/components/schemas/SourceGooglePagespeedInsightsCreateRequest" tags: - "Sources" responses: @@ -13085,14 +14805,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceHubspot" + operationId: "createSourceGooglePagespeedInsights" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hubspot#create - /sources/{sourceId}#Hubspot: + x-speakeasy-entity-operation: Source_GooglePagespeedInsights#create + /sources/{sourceId}#GooglePagespeedInsights: get: tags: - "Sources" @@ -13107,10 +14827,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceHubspot" + operationId: "getSourceGooglePagespeedInsights" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hubspot#read + x-speakeasy-entity-operation: Source_GooglePagespeedInsights#read put: tags: - "Sources" @@ -13118,7 +14838,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceHubspotPutRequest" + $ref: "#/components/schemas/SourceGooglePagespeedInsightsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13126,10 +14846,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceHubspot" + operationId: "putSourceGooglePagespeedInsights" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hubspot#update + x-speakeasy-entity-operation: Source_GooglePagespeedInsights#update delete: tags: - "Sources" @@ -13140,10 +14860,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceHubspot" + operationId: "deleteSourceGooglePagespeedInsights" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Hubspot#delete + x-speakeasy-entity-operation: Source_GooglePagespeedInsights#delete parameters: - name: "sourceId" schema: @@ -13151,13 +14871,13 @@ paths: type: "string" in: "path" required: true - /sources#IlluminaBasespace: + /sources#GoogleSearchConsole: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceIlluminaBasespaceCreateRequest" + $ref: "#/components/schemas/SourceGoogleSearchConsoleCreateRequest" tags: - "Sources" responses: @@ -13171,14 +14891,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceIlluminaBasespace" + operationId: "createSourceGoogleSearchConsole" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_IlluminaBasespace#create - /sources/{sourceId}#IlluminaBasespace: + x-speakeasy-entity-operation: Source_GoogleSearchConsole#create + /sources/{sourceId}#GoogleSearchConsole: get: tags: - "Sources" @@ -13193,10 +14913,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceIlluminaBasespace" + operationId: "getSourceGoogleSearchConsole" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_IlluminaBasespace#read + x-speakeasy-entity-operation: Source_GoogleSearchConsole#read put: tags: - "Sources" @@ -13204,7 +14924,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceIlluminaBasespacePutRequest" + $ref: "#/components/schemas/SourceGoogleSearchConsolePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13212,10 +14932,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceIlluminaBasespace" + operationId: "putSourceGoogleSearchConsole" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_IlluminaBasespace#update + x-speakeasy-entity-operation: Source_GoogleSearchConsole#update delete: tags: - "Sources" @@ -13226,10 +14946,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceIlluminaBasespace" + operationId: "deleteSourceGoogleSearchConsole" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_IlluminaBasespace#delete + x-speakeasy-entity-operation: Source_GoogleSearchConsole#delete parameters: - name: "sourceId" schema: @@ -13237,13 +14957,13 @@ paths: type: "string" in: "path" required: true - /sources#IncidentIo: + /sources#GoogleSheets: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceIncidentIoCreateRequest" + $ref: "#/components/schemas/SourceGoogleSheetsCreateRequest" tags: - "Sources" responses: @@ -13257,14 +14977,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceIncidentIo" + operationId: "createSourceGoogleSheets" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_IncidentIo#create - /sources/{sourceId}#IncidentIo: + x-speakeasy-entity-operation: Source_GoogleSheets#create + /sources/{sourceId}#GoogleSheets: get: tags: - "Sources" @@ -13279,10 +14999,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceIncidentIo" + operationId: "getSourceGoogleSheets" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_IncidentIo#read + x-speakeasy-entity-operation: Source_GoogleSheets#read put: tags: - "Sources" @@ -13290,7 +15010,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceIncidentIoPutRequest" + $ref: "#/components/schemas/SourceGoogleSheetsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13298,10 +15018,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceIncidentIo" + operationId: "putSourceGoogleSheets" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_IncidentIo#update + x-speakeasy-entity-operation: Source_GoogleSheets#update delete: tags: - "Sources" @@ -13312,10 +15032,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceIncidentIo" + operationId: "deleteSourceGoogleSheets" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_IncidentIo#delete + x-speakeasy-entity-operation: Source_GoogleSheets#delete parameters: - name: "sourceId" schema: @@ -13323,13 +15043,13 @@ paths: type: "string" in: "path" required: true - /sources#Insightly: + /sources#GoogleTasks: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceInsightlyCreateRequest" + $ref: "#/components/schemas/SourceGoogleTasksCreateRequest" tags: - "Sources" responses: @@ -13343,14 +15063,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceInsightly" + operationId: "createSourceGoogleTasks" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Insightly#create - /sources/{sourceId}#Insightly: + x-speakeasy-entity-operation: Source_GoogleTasks#create + /sources/{sourceId}#GoogleTasks: get: tags: - "Sources" @@ -13365,10 +15085,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceInsightly" + operationId: "getSourceGoogleTasks" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Insightly#read + x-speakeasy-entity-operation: Source_GoogleTasks#read put: tags: - "Sources" @@ -13376,7 +15096,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceInsightlyPutRequest" + $ref: "#/components/schemas/SourceGoogleTasksPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13384,10 +15104,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceInsightly" + operationId: "putSourceGoogleTasks" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Insightly#update + x-speakeasy-entity-operation: Source_GoogleTasks#update delete: tags: - "Sources" @@ -13398,10 +15118,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceInsightly" + operationId: "deleteSourceGoogleTasks" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Insightly#delete + x-speakeasy-entity-operation: Source_GoogleTasks#delete parameters: - name: "sourceId" schema: @@ -13409,13 +15129,13 @@ paths: type: "string" in: "path" required: true - /sources#Instagram: + /sources#GoogleWebfonts: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceInstagramCreateRequest" + $ref: "#/components/schemas/SourceGoogleWebfontsCreateRequest" tags: - "Sources" responses: @@ -13429,14 +15149,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceInstagram" + operationId: "createSourceGoogleWebfonts" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Instagram#create - /sources/{sourceId}#Instagram: + x-speakeasy-entity-operation: Source_GoogleWebfonts#create + /sources/{sourceId}#GoogleWebfonts: get: tags: - "Sources" @@ -13451,10 +15171,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceInstagram" + operationId: "getSourceGoogleWebfonts" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Instagram#read + x-speakeasy-entity-operation: Source_GoogleWebfonts#read put: tags: - "Sources" @@ -13462,7 +15182,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceInstagramPutRequest" + $ref: "#/components/schemas/SourceGoogleWebfontsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13470,10 +15190,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceInstagram" + operationId: "putSourceGoogleWebfonts" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Instagram#update + x-speakeasy-entity-operation: Source_GoogleWebfonts#update delete: tags: - "Sources" @@ -13484,10 +15204,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceInstagram" + operationId: "deleteSourceGoogleWebfonts" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Instagram#delete + x-speakeasy-entity-operation: Source_GoogleWebfonts#delete parameters: - name: "sourceId" schema: @@ -13495,13 +15215,13 @@ paths: type: "string" in: "path" required: true - /sources#Instatus: + /sources#Gorgias: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceInstatusCreateRequest" + $ref: "#/components/schemas/SourceGorgiasCreateRequest" tags: - "Sources" responses: @@ -13515,14 +15235,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceInstatus" + operationId: "createSourceGorgias" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Instatus#create - /sources/{sourceId}#Instatus: + x-speakeasy-entity-operation: Source_Gorgias#create + /sources/{sourceId}#Gorgias: get: tags: - "Sources" @@ -13537,10 +15257,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceInstatus" + operationId: "getSourceGorgias" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Instatus#read + x-speakeasy-entity-operation: Source_Gorgias#read put: tags: - "Sources" @@ -13548,7 +15268,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceInstatusPutRequest" + $ref: "#/components/schemas/SourceGorgiasPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13556,10 +15276,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceInstatus" + operationId: "putSourceGorgias" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Instatus#update + x-speakeasy-entity-operation: Source_Gorgias#update delete: tags: - "Sources" @@ -13570,10 +15290,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceInstatus" + operationId: "deleteSourceGorgias" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Instatus#delete + x-speakeasy-entity-operation: Source_Gorgias#delete parameters: - name: "sourceId" schema: @@ -13581,13 +15301,13 @@ paths: type: "string" in: "path" required: true - /sources#Intercom: + /sources#Greenhouse: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceIntercomCreateRequest" + $ref: "#/components/schemas/SourceGreenhouseCreateRequest" tags: - "Sources" responses: @@ -13601,14 +15321,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceIntercom" + operationId: "createSourceGreenhouse" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Intercom#create - /sources/{sourceId}#Intercom: + x-speakeasy-entity-operation: Source_Greenhouse#create + /sources/{sourceId}#Greenhouse: get: tags: - "Sources" @@ -13623,10 +15343,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceIntercom" + operationId: "getSourceGreenhouse" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Intercom#read + x-speakeasy-entity-operation: Source_Greenhouse#read put: tags: - "Sources" @@ -13634,7 +15354,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceIntercomPutRequest" + $ref: "#/components/schemas/SourceGreenhousePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13642,10 +15362,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceIntercom" + operationId: "putSourceGreenhouse" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Intercom#update + x-speakeasy-entity-operation: Source_Greenhouse#update delete: tags: - "Sources" @@ -13656,10 +15376,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceIntercom" + operationId: "deleteSourceGreenhouse" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Intercom#delete + x-speakeasy-entity-operation: Source_Greenhouse#delete parameters: - name: "sourceId" schema: @@ -13667,13 +15387,13 @@ paths: type: "string" in: "path" required: true - /sources#Invoiced: + /sources#Gridly: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceInvoicedCreateRequest" + $ref: "#/components/schemas/SourceGridlyCreateRequest" tags: - "Sources" responses: @@ -13687,14 +15407,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceInvoiced" + operationId: "createSourceGridly" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Invoiced#create - /sources/{sourceId}#Invoiced: + x-speakeasy-entity-operation: Source_Gridly#create + /sources/{sourceId}#Gridly: get: tags: - "Sources" @@ -13709,10 +15429,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceInvoiced" + operationId: "getSourceGridly" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Invoiced#read + x-speakeasy-entity-operation: Source_Gridly#read put: tags: - "Sources" @@ -13720,7 +15440,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceInvoicedPutRequest" + $ref: "#/components/schemas/SourceGridlyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13728,10 +15448,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceInvoiced" + operationId: "putSourceGridly" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Invoiced#update + x-speakeasy-entity-operation: Source_Gridly#update delete: tags: - "Sources" @@ -13742,10 +15462,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceInvoiced" + operationId: "deleteSourceGridly" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Invoiced#delete + x-speakeasy-entity-operation: Source_Gridly#delete parameters: - name: "sourceId" schema: @@ -13753,13 +15473,13 @@ paths: type: "string" in: "path" required: true - /sources#Ip2whois: + /sources#Guru: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceIp2whoisCreateRequest" + $ref: "#/components/schemas/SourceGuruCreateRequest" tags: - "Sources" responses: @@ -13773,14 +15493,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceIp2whois" + operationId: "createSourceGuru" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Ip2whois#create - /sources/{sourceId}#Ip2whois: + x-speakeasy-entity-operation: Source_Guru#create + /sources/{sourceId}#Guru: get: tags: - "Sources" @@ -13795,10 +15515,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceIp2whois" + operationId: "getSourceGuru" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Ip2whois#read + x-speakeasy-entity-operation: Source_Guru#read put: tags: - "Sources" @@ -13806,7 +15526,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceIp2whoisPutRequest" + $ref: "#/components/schemas/SourceGuruPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13814,10 +15534,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceIp2whois" + operationId: "putSourceGuru" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Ip2whois#update + x-speakeasy-entity-operation: Source_Guru#update delete: tags: - "Sources" @@ -13828,10 +15548,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceIp2whois" + operationId: "deleteSourceGuru" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Ip2whois#delete + x-speakeasy-entity-operation: Source_Guru#delete parameters: - name: "sourceId" schema: @@ -13839,13 +15559,13 @@ paths: type: "string" in: "path" required: true - /sources#Iterable: + /sources#Gutendex: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceIterableCreateRequest" + $ref: "#/components/schemas/SourceGutendexCreateRequest" tags: - "Sources" responses: @@ -13859,14 +15579,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceIterable" + operationId: "createSourceGutendex" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Iterable#create - /sources/{sourceId}#Iterable: + x-speakeasy-entity-operation: Source_Gutendex#create + /sources/{sourceId}#Gutendex: get: tags: - "Sources" @@ -13881,10 +15601,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceIterable" + operationId: "getSourceGutendex" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Iterable#read + x-speakeasy-entity-operation: Source_Gutendex#read put: tags: - "Sources" @@ -13892,7 +15612,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceIterablePutRequest" + $ref: "#/components/schemas/SourceGutendexPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13900,10 +15620,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceIterable" + operationId: "putSourceGutendex" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Iterable#update + x-speakeasy-entity-operation: Source_Gutendex#update delete: tags: - "Sources" @@ -13914,10 +15634,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceIterable" + operationId: "deleteSourceGutendex" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Iterable#delete + x-speakeasy-entity-operation: Source_Gutendex#delete parameters: - name: "sourceId" schema: @@ -13925,13 +15645,13 @@ paths: type: "string" in: "path" required: true - /sources#Jira: + /sources#HardcodedRecords: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceJiraCreateRequest" + $ref: "#/components/schemas/SourceHardcodedRecordsCreateRequest" tags: - "Sources" responses: @@ -13945,14 +15665,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceJira" + operationId: "createSourceHardcodedRecords" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Jira#create - /sources/{sourceId}#Jira: + x-speakeasy-entity-operation: Source_HardcodedRecords#create + /sources/{sourceId}#HardcodedRecords: get: tags: - "Sources" @@ -13967,10 +15687,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceJira" + operationId: "getSourceHardcodedRecords" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Jira#read + x-speakeasy-entity-operation: Source_HardcodedRecords#read put: tags: - "Sources" @@ -13978,7 +15698,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceJiraPutRequest" + $ref: "#/components/schemas/SourceHardcodedRecordsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -13986,10 +15706,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceJira" + operationId: "putSourceHardcodedRecords" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Jira#update + x-speakeasy-entity-operation: Source_HardcodedRecords#update delete: tags: - "Sources" @@ -14000,10 +15720,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceJira" + operationId: "deleteSourceHardcodedRecords" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Jira#delete + x-speakeasy-entity-operation: Source_HardcodedRecords#delete parameters: - name: "sourceId" schema: @@ -14011,13 +15731,13 @@ paths: type: "string" in: "path" required: true - /sources#Jotform: + /sources#Harvest: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceJotformCreateRequest" + $ref: "#/components/schemas/SourceHarvestCreateRequest" tags: - "Sources" responses: @@ -14031,14 +15751,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceJotform" + operationId: "createSourceHarvest" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Jotform#create - /sources/{sourceId}#Jotform: + x-speakeasy-entity-operation: Source_Harvest#create + /sources/{sourceId}#Harvest: get: tags: - "Sources" @@ -14053,10 +15773,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceJotform" + operationId: "getSourceHarvest" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Jotform#read + x-speakeasy-entity-operation: Source_Harvest#read put: tags: - "Sources" @@ -14064,7 +15784,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceJotformPutRequest" + $ref: "#/components/schemas/SourceHarvestPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14072,10 +15792,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceJotform" + operationId: "putSourceHarvest" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Jotform#update + x-speakeasy-entity-operation: Source_Harvest#update delete: tags: - "Sources" @@ -14086,10 +15806,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceJotform" + operationId: "deleteSourceHarvest" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Jotform#delete + x-speakeasy-entity-operation: Source_Harvest#delete parameters: - name: "sourceId" schema: @@ -14097,13 +15817,13 @@ paths: type: "string" in: "path" required: true - /sources#JustSift: + /sources#Height: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceJustSiftCreateRequest" + $ref: "#/components/schemas/SourceHeightCreateRequest" tags: - "Sources" responses: @@ -14117,14 +15837,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceJustSift" + operationId: "createSourceHeight" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_JustSift#create - /sources/{sourceId}#JustSift: + x-speakeasy-entity-operation: Source_Height#create + /sources/{sourceId}#Height: get: tags: - "Sources" @@ -14139,10 +15859,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceJustSift" + operationId: "getSourceHeight" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_JustSift#read + x-speakeasy-entity-operation: Source_Height#read put: tags: - "Sources" @@ -14150,7 +15870,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceJustSiftPutRequest" + $ref: "#/components/schemas/SourceHeightPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14158,10 +15878,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceJustSift" + operationId: "putSourceHeight" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_JustSift#update + x-speakeasy-entity-operation: Source_Height#update delete: tags: - "Sources" @@ -14172,10 +15892,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceJustSift" + operationId: "deleteSourceHeight" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_JustSift#delete + x-speakeasy-entity-operation: Source_Height#delete parameters: - name: "sourceId" schema: @@ -14183,13 +15903,13 @@ paths: type: "string" in: "path" required: true - /sources#Justcall: + /sources#Hibob: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceJustcallCreateRequest" + $ref: "#/components/schemas/SourceHibobCreateRequest" tags: - "Sources" responses: @@ -14203,14 +15923,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceJustcall" + operationId: "createSourceHibob" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Justcall#create - /sources/{sourceId}#Justcall: + x-speakeasy-entity-operation: Source_Hibob#create + /sources/{sourceId}#Hibob: get: tags: - "Sources" @@ -14225,10 +15945,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceJustcall" + operationId: "getSourceHibob" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Justcall#read + x-speakeasy-entity-operation: Source_Hibob#read put: tags: - "Sources" @@ -14236,7 +15956,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceJustcallPutRequest" + $ref: "#/components/schemas/SourceHibobPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14244,10 +15964,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceJustcall" + operationId: "putSourceHibob" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Justcall#update + x-speakeasy-entity-operation: Source_Hibob#update delete: tags: - "Sources" @@ -14258,10 +15978,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceJustcall" + operationId: "deleteSourceHibob" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Justcall#delete + x-speakeasy-entity-operation: Source_Hibob#delete parameters: - name: "sourceId" schema: @@ -14269,13 +15989,13 @@ paths: type: "string" in: "path" required: true - /sources#K6Cloud: + /sources#HighLevel: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceK6CloudCreateRequest" + $ref: "#/components/schemas/SourceHighLevelCreateRequest" tags: - "Sources" responses: @@ -14289,14 +16009,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceK6Cloud" + operationId: "createSourceHighLevel" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_K6Cloud#create - /sources/{sourceId}#K6Cloud: + x-speakeasy-entity-operation: Source_HighLevel#create + /sources/{sourceId}#HighLevel: get: tags: - "Sources" @@ -14311,10 +16031,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceK6Cloud" + operationId: "getSourceHighLevel" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_K6Cloud#read + x-speakeasy-entity-operation: Source_HighLevel#read put: tags: - "Sources" @@ -14322,7 +16042,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceK6CloudPutRequest" + $ref: "#/components/schemas/SourceHighLevelPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14330,10 +16050,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceK6Cloud" + operationId: "putSourceHighLevel" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_K6Cloud#update + x-speakeasy-entity-operation: Source_HighLevel#update delete: tags: - "Sources" @@ -14344,10 +16064,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceK6Cloud" + operationId: "deleteSourceHighLevel" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_K6Cloud#delete + x-speakeasy-entity-operation: Source_HighLevel#delete parameters: - name: "sourceId" schema: @@ -14355,13 +16075,13 @@ paths: type: "string" in: "path" required: true - /sources#Katana: + /sources#Hubplanner: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceKatanaCreateRequest" + $ref: "#/components/schemas/SourceHubplannerCreateRequest" tags: - "Sources" responses: @@ -14375,14 +16095,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceKatana" + operationId: "createSourceHubplanner" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Katana#create - /sources/{sourceId}#Katana: + x-speakeasy-entity-operation: Source_Hubplanner#create + /sources/{sourceId}#Hubplanner: get: tags: - "Sources" @@ -14397,10 +16117,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceKatana" + operationId: "getSourceHubplanner" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Katana#read + x-speakeasy-entity-operation: Source_Hubplanner#read put: tags: - "Sources" @@ -14408,7 +16128,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceKatanaPutRequest" + $ref: "#/components/schemas/SourceHubplannerPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14416,10 +16136,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceKatana" + operationId: "putSourceHubplanner" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Katana#update + x-speakeasy-entity-operation: Source_Hubplanner#update delete: tags: - "Sources" @@ -14430,10 +16150,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceKatana" + operationId: "deleteSourceHubplanner" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Katana#delete + x-speakeasy-entity-operation: Source_Hubplanner#delete parameters: - name: "sourceId" schema: @@ -14441,13 +16161,13 @@ paths: type: "string" in: "path" required: true - /sources#Kisi: + /sources#Hubspot: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceKisiCreateRequest" + $ref: "#/components/schemas/SourceHubspotCreateRequest" tags: - "Sources" responses: @@ -14461,14 +16181,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceKisi" + operationId: "createSourceHubspot" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kisi#create - /sources/{sourceId}#Kisi: + x-speakeasy-entity-operation: Source_Hubspot#create + /sources/{sourceId}#Hubspot: get: tags: - "Sources" @@ -14483,10 +16203,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceKisi" + operationId: "getSourceHubspot" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kisi#read + x-speakeasy-entity-operation: Source_Hubspot#read put: tags: - "Sources" @@ -14494,7 +16214,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceKisiPutRequest" + $ref: "#/components/schemas/SourceHubspotPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14502,10 +16222,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceKisi" + operationId: "putSourceHubspot" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kisi#update + x-speakeasy-entity-operation: Source_Hubspot#update delete: tags: - "Sources" @@ -14516,10 +16236,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceKisi" + operationId: "deleteSourceHubspot" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kisi#delete + x-speakeasy-entity-operation: Source_Hubspot#delete parameters: - name: "sourceId" schema: @@ -14527,13 +16247,13 @@ paths: type: "string" in: "path" required: true - /sources#Kissmetrics: + /sources#Humanitix: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceKissmetricsCreateRequest" + $ref: "#/components/schemas/SourceHumanitixCreateRequest" tags: - "Sources" responses: @@ -14547,14 +16267,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceKissmetrics" + operationId: "createSourceHumanitix" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kissmetrics#create - /sources/{sourceId}#Kissmetrics: + x-speakeasy-entity-operation: Source_Humanitix#create + /sources/{sourceId}#Humanitix: get: tags: - "Sources" @@ -14569,10 +16289,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceKissmetrics" + operationId: "getSourceHumanitix" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kissmetrics#read + x-speakeasy-entity-operation: Source_Humanitix#read put: tags: - "Sources" @@ -14580,7 +16300,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceKissmetricsPutRequest" + $ref: "#/components/schemas/SourceHumanitixPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14588,10 +16308,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceKissmetrics" + operationId: "putSourceHumanitix" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kissmetrics#update + x-speakeasy-entity-operation: Source_Humanitix#update delete: tags: - "Sources" @@ -14602,10 +16322,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceKissmetrics" + operationId: "deleteSourceHumanitix" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kissmetrics#delete + x-speakeasy-entity-operation: Source_Humanitix#delete parameters: - name: "sourceId" schema: @@ -14613,13 +16333,13 @@ paths: type: "string" in: "path" required: true - /sources#Klarna: + /sources#IlluminaBasespace: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceKlarnaCreateRequest" + $ref: "#/components/schemas/SourceIlluminaBasespaceCreateRequest" tags: - "Sources" responses: @@ -14633,14 +16353,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceKlarna" + operationId: "createSourceIlluminaBasespace" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Klarna#create - /sources/{sourceId}#Klarna: + x-speakeasy-entity-operation: Source_IlluminaBasespace#create + /sources/{sourceId}#IlluminaBasespace: get: tags: - "Sources" @@ -14655,10 +16375,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceKlarna" + operationId: "getSourceIlluminaBasespace" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Klarna#read + x-speakeasy-entity-operation: Source_IlluminaBasespace#read put: tags: - "Sources" @@ -14666,7 +16386,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceKlarnaPutRequest" + $ref: "#/components/schemas/SourceIlluminaBasespacePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14674,10 +16394,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceKlarna" + operationId: "putSourceIlluminaBasespace" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Klarna#update + x-speakeasy-entity-operation: Source_IlluminaBasespace#update delete: tags: - "Sources" @@ -14688,10 +16408,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceKlarna" + operationId: "deleteSourceIlluminaBasespace" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Klarna#delete + x-speakeasy-entity-operation: Source_IlluminaBasespace#delete parameters: - name: "sourceId" schema: @@ -14699,13 +16419,13 @@ paths: type: "string" in: "path" required: true - /sources#KlausApi: + /sources#IncidentIo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceKlausApiCreateRequest" + $ref: "#/components/schemas/SourceIncidentIoCreateRequest" tags: - "Sources" responses: @@ -14719,14 +16439,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceKlausApi" + operationId: "createSourceIncidentIo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_KlausApi#create - /sources/{sourceId}#KlausApi: + x-speakeasy-entity-operation: Source_IncidentIo#create + /sources/{sourceId}#IncidentIo: get: tags: - "Sources" @@ -14741,10 +16461,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceKlausApi" + operationId: "getSourceIncidentIo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_KlausApi#read + x-speakeasy-entity-operation: Source_IncidentIo#read put: tags: - "Sources" @@ -14752,7 +16472,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceKlausApiPutRequest" + $ref: "#/components/schemas/SourceIncidentIoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14760,10 +16480,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceKlausApi" + operationId: "putSourceIncidentIo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_KlausApi#update + x-speakeasy-entity-operation: Source_IncidentIo#update delete: tags: - "Sources" @@ -14774,10 +16494,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceKlausApi" + operationId: "deleteSourceIncidentIo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_KlausApi#delete + x-speakeasy-entity-operation: Source_IncidentIo#delete parameters: - name: "sourceId" schema: @@ -14785,13 +16505,13 @@ paths: type: "string" in: "path" required: true - /sources#Klaviyo: + /sources#Inflowinventory: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceKlaviyoCreateRequest" + $ref: "#/components/schemas/SourceInflowinventoryCreateRequest" tags: - "Sources" responses: @@ -14805,14 +16525,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceKlaviyo" + operationId: "createSourceInflowinventory" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Klaviyo#create - /sources/{sourceId}#Klaviyo: + x-speakeasy-entity-operation: Source_Inflowinventory#create + /sources/{sourceId}#Inflowinventory: get: tags: - "Sources" @@ -14827,10 +16547,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceKlaviyo" + operationId: "getSourceInflowinventory" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Klaviyo#read + x-speakeasy-entity-operation: Source_Inflowinventory#read put: tags: - "Sources" @@ -14838,7 +16558,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceKlaviyoPutRequest" + $ref: "#/components/schemas/SourceInflowinventoryPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14846,10 +16566,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceKlaviyo" + operationId: "putSourceInflowinventory" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Klaviyo#update + x-speakeasy-entity-operation: Source_Inflowinventory#update delete: tags: - "Sources" @@ -14860,10 +16580,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceKlaviyo" + operationId: "deleteSourceInflowinventory" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Klaviyo#delete + x-speakeasy-entity-operation: Source_Inflowinventory#delete parameters: - name: "sourceId" schema: @@ -14871,13 +16591,13 @@ paths: type: "string" in: "path" required: true - /sources#Kyve: + /sources#Insightly: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceKyveCreateRequest" + $ref: "#/components/schemas/SourceInsightlyCreateRequest" tags: - "Sources" responses: @@ -14891,14 +16611,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceKyve" + operationId: "createSourceInsightly" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kyve#create - /sources/{sourceId}#Kyve: + x-speakeasy-entity-operation: Source_Insightly#create + /sources/{sourceId}#Insightly: get: tags: - "Sources" @@ -14913,10 +16633,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceKyve" + operationId: "getSourceInsightly" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kyve#read + x-speakeasy-entity-operation: Source_Insightly#read put: tags: - "Sources" @@ -14924,7 +16644,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceKyvePutRequest" + $ref: "#/components/schemas/SourceInsightlyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -14932,10 +16652,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceKyve" + operationId: "putSourceInsightly" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kyve#update + x-speakeasy-entity-operation: Source_Insightly#update delete: tags: - "Sources" @@ -14946,10 +16666,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceKyve" + operationId: "deleteSourceInsightly" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Kyve#delete + x-speakeasy-entity-operation: Source_Insightly#delete parameters: - name: "sourceId" schema: @@ -14957,13 +16677,13 @@ paths: type: "string" in: "path" required: true - /sources#Launchdarkly: + /sources#Instagram: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLaunchdarklyCreateRequest" + $ref: "#/components/schemas/SourceInstagramCreateRequest" tags: - "Sources" responses: @@ -14977,14 +16697,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLaunchdarkly" + operationId: "createSourceInstagram" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Launchdarkly#create - /sources/{sourceId}#Launchdarkly: + x-speakeasy-entity-operation: Source_Instagram#create + /sources/{sourceId}#Instagram: get: tags: - "Sources" @@ -14999,10 +16719,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLaunchdarkly" + operationId: "getSourceInstagram" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Launchdarkly#read + x-speakeasy-entity-operation: Source_Instagram#read put: tags: - "Sources" @@ -15010,7 +16730,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLaunchdarklyPutRequest" + $ref: "#/components/schemas/SourceInstagramPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15018,10 +16738,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLaunchdarkly" + operationId: "putSourceInstagram" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Launchdarkly#update + x-speakeasy-entity-operation: Source_Instagram#update delete: tags: - "Sources" @@ -15032,10 +16752,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLaunchdarkly" + operationId: "deleteSourceInstagram" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Launchdarkly#delete + x-speakeasy-entity-operation: Source_Instagram#delete parameters: - name: "sourceId" schema: @@ -15043,13 +16763,13 @@ paths: type: "string" in: "path" required: true - /sources#Leadfeeder: + /sources#Instatus: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLeadfeederCreateRequest" + $ref: "#/components/schemas/SourceInstatusCreateRequest" tags: - "Sources" responses: @@ -15063,14 +16783,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLeadfeeder" + operationId: "createSourceInstatus" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Leadfeeder#create - /sources/{sourceId}#Leadfeeder: + x-speakeasy-entity-operation: Source_Instatus#create + /sources/{sourceId}#Instatus: get: tags: - "Sources" @@ -15085,10 +16805,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLeadfeeder" + operationId: "getSourceInstatus" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Leadfeeder#read + x-speakeasy-entity-operation: Source_Instatus#read put: tags: - "Sources" @@ -15096,7 +16816,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLeadfeederPutRequest" + $ref: "#/components/schemas/SourceInstatusPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15104,10 +16824,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLeadfeeder" + operationId: "putSourceInstatus" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Leadfeeder#update + x-speakeasy-entity-operation: Source_Instatus#update delete: tags: - "Sources" @@ -15118,10 +16838,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLeadfeeder" + operationId: "deleteSourceInstatus" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Leadfeeder#delete + x-speakeasy-entity-operation: Source_Instatus#delete parameters: - name: "sourceId" schema: @@ -15129,13 +16849,13 @@ paths: type: "string" in: "path" required: true - /sources#Lemlist: + /sources#Intercom: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLemlistCreateRequest" + $ref: "#/components/schemas/SourceIntercomCreateRequest" tags: - "Sources" responses: @@ -15149,14 +16869,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLemlist" + operationId: "createSourceIntercom" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lemlist#create - /sources/{sourceId}#Lemlist: + x-speakeasy-entity-operation: Source_Intercom#create + /sources/{sourceId}#Intercom: get: tags: - "Sources" @@ -15171,10 +16891,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLemlist" + operationId: "getSourceIntercom" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lemlist#read + x-speakeasy-entity-operation: Source_Intercom#read put: tags: - "Sources" @@ -15182,7 +16902,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLemlistPutRequest" + $ref: "#/components/schemas/SourceIntercomPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15190,10 +16910,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLemlist" + operationId: "putSourceIntercom" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lemlist#update + x-speakeasy-entity-operation: Source_Intercom#update delete: tags: - "Sources" @@ -15204,10 +16924,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLemlist" + operationId: "deleteSourceIntercom" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lemlist#delete + x-speakeasy-entity-operation: Source_Intercom#delete parameters: - name: "sourceId" schema: @@ -15215,13 +16935,13 @@ paths: type: "string" in: "path" required: true - /sources#LeverHiring: + /sources#Invoiced: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLeverHiringCreateRequest" + $ref: "#/components/schemas/SourceInvoicedCreateRequest" tags: - "Sources" responses: @@ -15235,14 +16955,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLeverHiring" + operationId: "createSourceInvoiced" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LeverHiring#create - /sources/{sourceId}#LeverHiring: + x-speakeasy-entity-operation: Source_Invoiced#create + /sources/{sourceId}#Invoiced: get: tags: - "Sources" @@ -15257,10 +16977,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLeverHiring" + operationId: "getSourceInvoiced" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LeverHiring#read + x-speakeasy-entity-operation: Source_Invoiced#read put: tags: - "Sources" @@ -15268,7 +16988,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLeverHiringPutRequest" + $ref: "#/components/schemas/SourceInvoicedPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15276,10 +16996,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLeverHiring" + operationId: "putSourceInvoiced" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LeverHiring#update + x-speakeasy-entity-operation: Source_Invoiced#update delete: tags: - "Sources" @@ -15290,10 +17010,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLeverHiring" + operationId: "deleteSourceInvoiced" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LeverHiring#delete + x-speakeasy-entity-operation: Source_Invoiced#delete parameters: - name: "sourceId" schema: @@ -15301,13 +17021,13 @@ paths: type: "string" in: "path" required: true - /sources#LinkedinAds: + /sources#Invoiceninja: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLinkedinAdsCreateRequest" + $ref: "#/components/schemas/SourceInvoiceninjaCreateRequest" tags: - "Sources" responses: @@ -15321,14 +17041,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLinkedinAds" + operationId: "createSourceInvoiceninja" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LinkedinAds#create - /sources/{sourceId}#LinkedinAds: + x-speakeasy-entity-operation: Source_Invoiceninja#create + /sources/{sourceId}#Invoiceninja: get: tags: - "Sources" @@ -15343,10 +17063,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLinkedinAds" + operationId: "getSourceInvoiceninja" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LinkedinAds#read + x-speakeasy-entity-operation: Source_Invoiceninja#read put: tags: - "Sources" @@ -15354,7 +17074,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLinkedinAdsPutRequest" + $ref: "#/components/schemas/SourceInvoiceninjaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15362,10 +17082,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLinkedinAds" + operationId: "putSourceInvoiceninja" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LinkedinAds#update + x-speakeasy-entity-operation: Source_Invoiceninja#update delete: tags: - "Sources" @@ -15376,10 +17096,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLinkedinAds" + operationId: "deleteSourceInvoiceninja" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LinkedinAds#delete + x-speakeasy-entity-operation: Source_Invoiceninja#delete parameters: - name: "sourceId" schema: @@ -15387,13 +17107,13 @@ paths: type: "string" in: "path" required: true - /sources#LinkedinPages: + /sources#Ip2whois: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLinkedinPagesCreateRequest" + $ref: "#/components/schemas/SourceIp2whoisCreateRequest" tags: - "Sources" responses: @@ -15407,14 +17127,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLinkedinPages" + operationId: "createSourceIp2whois" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LinkedinPages#create - /sources/{sourceId}#LinkedinPages: + x-speakeasy-entity-operation: Source_Ip2whois#create + /sources/{sourceId}#Ip2whois: get: tags: - "Sources" @@ -15429,10 +17149,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLinkedinPages" + operationId: "getSourceIp2whois" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LinkedinPages#read + x-speakeasy-entity-operation: Source_Ip2whois#read put: tags: - "Sources" @@ -15440,7 +17160,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLinkedinPagesPutRequest" + $ref: "#/components/schemas/SourceIp2whoisPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15448,10 +17168,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLinkedinPages" + operationId: "putSourceIp2whois" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LinkedinPages#update + x-speakeasy-entity-operation: Source_Ip2whois#update delete: tags: - "Sources" @@ -15462,10 +17182,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLinkedinPages" + operationId: "deleteSourceIp2whois" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_LinkedinPages#delete + x-speakeasy-entity-operation: Source_Ip2whois#delete parameters: - name: "sourceId" schema: @@ -15473,13 +17193,13 @@ paths: type: "string" in: "path" required: true - /sources#Linnworks: + /sources#Iterable: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLinnworksCreateRequest" + $ref: "#/components/schemas/SourceIterableCreateRequest" tags: - "Sources" responses: @@ -15493,14 +17213,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLinnworks" + operationId: "createSourceIterable" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Linnworks#create - /sources/{sourceId}#Linnworks: + x-speakeasy-entity-operation: Source_Iterable#create + /sources/{sourceId}#Iterable: get: tags: - "Sources" @@ -15515,10 +17235,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLinnworks" + operationId: "getSourceIterable" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Linnworks#read + x-speakeasy-entity-operation: Source_Iterable#read put: tags: - "Sources" @@ -15526,7 +17246,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLinnworksPutRequest" + $ref: "#/components/schemas/SourceIterablePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15534,10 +17254,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLinnworks" + operationId: "putSourceIterable" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Linnworks#update + x-speakeasy-entity-operation: Source_Iterable#update delete: tags: - "Sources" @@ -15548,10 +17268,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLinnworks" + operationId: "deleteSourceIterable" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Linnworks#delete + x-speakeasy-entity-operation: Source_Iterable#delete parameters: - name: "sourceId" schema: @@ -15559,13 +17279,13 @@ paths: type: "string" in: "path" required: true - /sources#Lob: + /sources#Jira: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLobCreateRequest" + $ref: "#/components/schemas/SourceJiraCreateRequest" tags: - "Sources" responses: @@ -15579,14 +17299,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLob" + operationId: "createSourceJira" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lob#create - /sources/{sourceId}#Lob: + x-speakeasy-entity-operation: Source_Jira#create + /sources/{sourceId}#Jira: get: tags: - "Sources" @@ -15601,10 +17321,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLob" + operationId: "getSourceJira" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lob#read + x-speakeasy-entity-operation: Source_Jira#read put: tags: - "Sources" @@ -15612,7 +17332,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLobPutRequest" + $ref: "#/components/schemas/SourceJiraPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15620,10 +17340,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLob" + operationId: "putSourceJira" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lob#update + x-speakeasy-entity-operation: Source_Jira#update delete: tags: - "Sources" @@ -15634,10 +17354,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLob" + operationId: "deleteSourceJira" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lob#delete + x-speakeasy-entity-operation: Source_Jira#delete parameters: - name: "sourceId" schema: @@ -15645,13 +17365,13 @@ paths: type: "string" in: "path" required: true - /sources#Lokalise: + /sources#Jobnimbus: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLokaliseCreateRequest" + $ref: "#/components/schemas/SourceJobnimbusCreateRequest" tags: - "Sources" responses: @@ -15665,14 +17385,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLokalise" + operationId: "createSourceJobnimbus" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lokalise#create - /sources/{sourceId}#Lokalise: + x-speakeasy-entity-operation: Source_Jobnimbus#create + /sources/{sourceId}#Jobnimbus: get: tags: - "Sources" @@ -15687,10 +17407,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLokalise" + operationId: "getSourceJobnimbus" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lokalise#read + x-speakeasy-entity-operation: Source_Jobnimbus#read put: tags: - "Sources" @@ -15698,7 +17418,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLokalisePutRequest" + $ref: "#/components/schemas/SourceJobnimbusPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15706,10 +17426,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLokalise" + operationId: "putSourceJobnimbus" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lokalise#update + x-speakeasy-entity-operation: Source_Jobnimbus#update delete: tags: - "Sources" @@ -15720,10 +17440,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLokalise" + operationId: "deleteSourceJobnimbus" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Lokalise#delete + x-speakeasy-entity-operation: Source_Jobnimbus#delete parameters: - name: "sourceId" schema: @@ -15731,13 +17451,13 @@ paths: type: "string" in: "path" required: true - /sources#Looker: + /sources#Jotform: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLookerCreateRequest" + $ref: "#/components/schemas/SourceJotformCreateRequest" tags: - "Sources" responses: @@ -15751,14 +17471,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLooker" + operationId: "createSourceJotform" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Looker#create - /sources/{sourceId}#Looker: + x-speakeasy-entity-operation: Source_Jotform#create + /sources/{sourceId}#Jotform: get: tags: - "Sources" @@ -15773,10 +17493,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLooker" + operationId: "getSourceJotform" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Looker#read + x-speakeasy-entity-operation: Source_Jotform#read put: tags: - "Sources" @@ -15784,7 +17504,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLookerPutRequest" + $ref: "#/components/schemas/SourceJotformPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15792,10 +17512,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLooker" + operationId: "putSourceJotform" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Looker#update + x-speakeasy-entity-operation: Source_Jotform#update delete: tags: - "Sources" @@ -15806,10 +17526,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLooker" + operationId: "deleteSourceJotform" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Looker#delete + x-speakeasy-entity-operation: Source_Jotform#delete parameters: - name: "sourceId" schema: @@ -15817,13 +17537,13 @@ paths: type: "string" in: "path" required: true - /sources#Luma: + /sources#JustSift: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceLumaCreateRequest" + $ref: "#/components/schemas/SourceJustSiftCreateRequest" tags: - "Sources" responses: @@ -15837,14 +17557,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceLuma" + operationId: "createSourceJustSift" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Luma#create - /sources/{sourceId}#Luma: + x-speakeasy-entity-operation: Source_JustSift#create + /sources/{sourceId}#JustSift: get: tags: - "Sources" @@ -15859,10 +17579,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceLuma" + operationId: "getSourceJustSift" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Luma#read + x-speakeasy-entity-operation: Source_JustSift#read put: tags: - "Sources" @@ -15870,7 +17590,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceLumaPutRequest" + $ref: "#/components/schemas/SourceJustSiftPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15878,10 +17598,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceLuma" + operationId: "putSourceJustSift" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Luma#update + x-speakeasy-entity-operation: Source_JustSift#update delete: tags: - "Sources" @@ -15892,10 +17612,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceLuma" + operationId: "deleteSourceJustSift" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Luma#delete + x-speakeasy-entity-operation: Source_JustSift#delete parameters: - name: "sourceId" schema: @@ -15903,13 +17623,13 @@ paths: type: "string" in: "path" required: true - /sources#Mailchimp: + /sources#Justcall: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMailchimpCreateRequest" + $ref: "#/components/schemas/SourceJustcallCreateRequest" tags: - "Sources" responses: @@ -15923,14 +17643,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMailchimp" + operationId: "createSourceJustcall" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailchimp#create - /sources/{sourceId}#Mailchimp: + x-speakeasy-entity-operation: Source_Justcall#create + /sources/{sourceId}#Justcall: get: tags: - "Sources" @@ -15945,10 +17665,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMailchimp" + operationId: "getSourceJustcall" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailchimp#read + x-speakeasy-entity-operation: Source_Justcall#read put: tags: - "Sources" @@ -15956,7 +17676,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMailchimpPutRequest" + $ref: "#/components/schemas/SourceJustcallPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -15964,10 +17684,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMailchimp" + operationId: "putSourceJustcall" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailchimp#update + x-speakeasy-entity-operation: Source_Justcall#update delete: tags: - "Sources" @@ -15978,10 +17698,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMailchimp" + operationId: "deleteSourceJustcall" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailchimp#delete + x-speakeasy-entity-operation: Source_Justcall#delete parameters: - name: "sourceId" schema: @@ -15989,13 +17709,13 @@ paths: type: "string" in: "path" required: true - /sources#Mailerlite: + /sources#K6Cloud: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMailerliteCreateRequest" + $ref: "#/components/schemas/SourceK6CloudCreateRequest" tags: - "Sources" responses: @@ -16009,14 +17729,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMailerlite" + operationId: "createSourceK6Cloud" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailerlite#create - /sources/{sourceId}#Mailerlite: + x-speakeasy-entity-operation: Source_K6Cloud#create + /sources/{sourceId}#K6Cloud: get: tags: - "Sources" @@ -16031,10 +17751,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMailerlite" + operationId: "getSourceK6Cloud" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailerlite#read + x-speakeasy-entity-operation: Source_K6Cloud#read put: tags: - "Sources" @@ -16042,7 +17762,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMailerlitePutRequest" + $ref: "#/components/schemas/SourceK6CloudPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16050,10 +17770,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMailerlite" + operationId: "putSourceK6Cloud" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailerlite#update + x-speakeasy-entity-operation: Source_K6Cloud#update delete: tags: - "Sources" @@ -16064,10 +17784,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMailerlite" + operationId: "deleteSourceK6Cloud" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailerlite#delete + x-speakeasy-entity-operation: Source_K6Cloud#delete parameters: - name: "sourceId" schema: @@ -16075,13 +17795,13 @@ paths: type: "string" in: "path" required: true - /sources#Mailgun: + /sources#Katana: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMailgunCreateRequest" + $ref: "#/components/schemas/SourceKatanaCreateRequest" tags: - "Sources" responses: @@ -16095,14 +17815,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMailgun" + operationId: "createSourceKatana" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailgun#create - /sources/{sourceId}#Mailgun: + x-speakeasy-entity-operation: Source_Katana#create + /sources/{sourceId}#Katana: get: tags: - "Sources" @@ -16117,10 +17837,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMailgun" + operationId: "getSourceKatana" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailgun#read + x-speakeasy-entity-operation: Source_Katana#read put: tags: - "Sources" @@ -16128,7 +17848,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMailgunPutRequest" + $ref: "#/components/schemas/SourceKatanaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16136,10 +17856,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMailgun" + operationId: "putSourceKatana" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailgun#update + x-speakeasy-entity-operation: Source_Katana#update delete: tags: - "Sources" @@ -16150,10 +17870,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMailgun" + operationId: "deleteSourceKatana" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mailgun#delete + x-speakeasy-entity-operation: Source_Katana#delete parameters: - name: "sourceId" schema: @@ -16161,13 +17881,13 @@ paths: type: "string" in: "path" required: true - /sources#MailjetMail: + /sources#Kisi: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMailjetMailCreateRequest" + $ref: "#/components/schemas/SourceKisiCreateRequest" tags: - "Sources" responses: @@ -16181,14 +17901,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMailjetMail" + operationId: "createSourceKisi" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MailjetMail#create - /sources/{sourceId}#MailjetMail: + x-speakeasy-entity-operation: Source_Kisi#create + /sources/{sourceId}#Kisi: get: tags: - "Sources" @@ -16203,10 +17923,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMailjetMail" + operationId: "getSourceKisi" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MailjetMail#read + x-speakeasy-entity-operation: Source_Kisi#read put: tags: - "Sources" @@ -16214,7 +17934,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMailjetMailPutRequest" + $ref: "#/components/schemas/SourceKisiPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16222,10 +17942,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMailjetMail" + operationId: "putSourceKisi" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MailjetMail#update + x-speakeasy-entity-operation: Source_Kisi#update delete: tags: - "Sources" @@ -16236,10 +17956,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMailjetMail" + operationId: "deleteSourceKisi" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MailjetMail#delete + x-speakeasy-entity-operation: Source_Kisi#delete parameters: - name: "sourceId" schema: @@ -16247,13 +17967,13 @@ paths: type: "string" in: "path" required: true - /sources#MailjetSms: + /sources#Kissmetrics: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMailjetSmsCreateRequest" + $ref: "#/components/schemas/SourceKissmetricsCreateRequest" tags: - "Sources" responses: @@ -16267,14 +17987,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMailjetSms" + operationId: "createSourceKissmetrics" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MailjetSms#create - /sources/{sourceId}#MailjetSms: + x-speakeasy-entity-operation: Source_Kissmetrics#create + /sources/{sourceId}#Kissmetrics: get: tags: - "Sources" @@ -16289,10 +18009,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMailjetSms" + operationId: "getSourceKissmetrics" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MailjetSms#read + x-speakeasy-entity-operation: Source_Kissmetrics#read put: tags: - "Sources" @@ -16300,7 +18020,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMailjetSmsPutRequest" + $ref: "#/components/schemas/SourceKissmetricsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16308,10 +18028,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMailjetSms" + operationId: "putSourceKissmetrics" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MailjetSms#update + x-speakeasy-entity-operation: Source_Kissmetrics#update delete: tags: - "Sources" @@ -16322,10 +18042,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMailjetSms" + operationId: "deleteSourceKissmetrics" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MailjetSms#delete + x-speakeasy-entity-operation: Source_Kissmetrics#delete parameters: - name: "sourceId" schema: @@ -16333,13 +18053,13 @@ paths: type: "string" in: "path" required: true - /sources#Marketo: + /sources#Klarna: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMarketoCreateRequest" + $ref: "#/components/schemas/SourceKlarnaCreateRequest" tags: - "Sources" responses: @@ -16353,14 +18073,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMarketo" + operationId: "createSourceKlarna" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Marketo#create - /sources/{sourceId}#Marketo: + x-speakeasy-entity-operation: Source_Klarna#create + /sources/{sourceId}#Klarna: get: tags: - "Sources" @@ -16375,10 +18095,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMarketo" + operationId: "getSourceKlarna" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Marketo#read + x-speakeasy-entity-operation: Source_Klarna#read put: tags: - "Sources" @@ -16386,7 +18106,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMarketoPutRequest" + $ref: "#/components/schemas/SourceKlarnaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16394,10 +18114,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMarketo" + operationId: "putSourceKlarna" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Marketo#update + x-speakeasy-entity-operation: Source_Klarna#update delete: tags: - "Sources" @@ -16408,10 +18128,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMarketo" + operationId: "deleteSourceKlarna" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Marketo#delete + x-speakeasy-entity-operation: Source_Klarna#delete parameters: - name: "sourceId" schema: @@ -16419,13 +18139,13 @@ paths: type: "string" in: "path" required: true - /sources#Mention: + /sources#KlausApi: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMentionCreateRequest" + $ref: "#/components/schemas/SourceKlausApiCreateRequest" tags: - "Sources" responses: @@ -16439,14 +18159,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMention" + operationId: "createSourceKlausApi" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mention#create - /sources/{sourceId}#Mention: + x-speakeasy-entity-operation: Source_KlausApi#create + /sources/{sourceId}#KlausApi: get: tags: - "Sources" @@ -16461,10 +18181,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMention" + operationId: "getSourceKlausApi" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mention#read + x-speakeasy-entity-operation: Source_KlausApi#read put: tags: - "Sources" @@ -16472,7 +18192,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMentionPutRequest" + $ref: "#/components/schemas/SourceKlausApiPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16480,10 +18200,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMention" + operationId: "putSourceKlausApi" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mention#update + x-speakeasy-entity-operation: Source_KlausApi#update delete: tags: - "Sources" @@ -16494,10 +18214,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMention" + operationId: "deleteSourceKlausApi" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mention#delete + x-speakeasy-entity-operation: Source_KlausApi#delete parameters: - name: "sourceId" schema: @@ -16505,13 +18225,13 @@ paths: type: "string" in: "path" required: true - /sources#Metabase: + /sources#Klaviyo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMetabaseCreateRequest" + $ref: "#/components/schemas/SourceKlaviyoCreateRequest" tags: - "Sources" responses: @@ -16525,14 +18245,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMetabase" + operationId: "createSourceKlaviyo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Metabase#create - /sources/{sourceId}#Metabase: + x-speakeasy-entity-operation: Source_Klaviyo#create + /sources/{sourceId}#Klaviyo: get: tags: - "Sources" @@ -16547,10 +18267,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMetabase" + operationId: "getSourceKlaviyo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Metabase#read + x-speakeasy-entity-operation: Source_Klaviyo#read put: tags: - "Sources" @@ -16558,7 +18278,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMetabasePutRequest" + $ref: "#/components/schemas/SourceKlaviyoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16566,10 +18286,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMetabase" + operationId: "putSourceKlaviyo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Metabase#update + x-speakeasy-entity-operation: Source_Klaviyo#update delete: tags: - "Sources" @@ -16580,10 +18300,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMetabase" + operationId: "deleteSourceKlaviyo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Metabase#delete + x-speakeasy-entity-operation: Source_Klaviyo#delete parameters: - name: "sourceId" schema: @@ -16591,13 +18311,13 @@ paths: type: "string" in: "path" required: true - /sources#MicrosoftDataverse: + /sources#Kyve: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftDataverseCreateRequest" + $ref: "#/components/schemas/SourceKyveCreateRequest" tags: - "Sources" responses: @@ -16611,14 +18331,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMicrosoftDataverse" + operationId: "createSourceKyve" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftDataverse#create - /sources/{sourceId}#MicrosoftDataverse: + x-speakeasy-entity-operation: Source_Kyve#create + /sources/{sourceId}#Kyve: get: tags: - "Sources" @@ -16633,10 +18353,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMicrosoftDataverse" + operationId: "getSourceKyve" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftDataverse#read + x-speakeasy-entity-operation: Source_Kyve#read put: tags: - "Sources" @@ -16644,7 +18364,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftDataversePutRequest" + $ref: "#/components/schemas/SourceKyvePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16652,10 +18372,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMicrosoftDataverse" + operationId: "putSourceKyve" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftDataverse#update + x-speakeasy-entity-operation: Source_Kyve#update delete: tags: - "Sources" @@ -16666,10 +18386,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMicrosoftDataverse" + operationId: "deleteSourceKyve" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftDataverse#delete + x-speakeasy-entity-operation: Source_Kyve#delete parameters: - name: "sourceId" schema: @@ -16677,13 +18397,13 @@ paths: type: "string" in: "path" required: true - /sources#MicrosoftEntraId: + /sources#Launchdarkly: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftEntraIdCreateRequest" + $ref: "#/components/schemas/SourceLaunchdarklyCreateRequest" tags: - "Sources" responses: @@ -16697,14 +18417,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMicrosoftEntraId" + operationId: "createSourceLaunchdarkly" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftEntraId#create - /sources/{sourceId}#MicrosoftEntraId: + x-speakeasy-entity-operation: Source_Launchdarkly#create + /sources/{sourceId}#Launchdarkly: get: tags: - "Sources" @@ -16719,10 +18439,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMicrosoftEntraId" + operationId: "getSourceLaunchdarkly" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftEntraId#read + x-speakeasy-entity-operation: Source_Launchdarkly#read put: tags: - "Sources" @@ -16730,7 +18450,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftEntraIdPutRequest" + $ref: "#/components/schemas/SourceLaunchdarklyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16738,10 +18458,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMicrosoftEntraId" + operationId: "putSourceLaunchdarkly" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftEntraId#update + x-speakeasy-entity-operation: Source_Launchdarkly#update delete: tags: - "Sources" @@ -16752,10 +18472,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMicrosoftEntraId" + operationId: "deleteSourceLaunchdarkly" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftEntraId#delete + x-speakeasy-entity-operation: Source_Launchdarkly#delete parameters: - name: "sourceId" schema: @@ -16763,13 +18483,13 @@ paths: type: "string" in: "path" required: true - /sources#MicrosoftLists: + /sources#Leadfeeder: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftListsCreateRequest" + $ref: "#/components/schemas/SourceLeadfeederCreateRequest" tags: - "Sources" responses: @@ -16783,14 +18503,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMicrosoftLists" + operationId: "createSourceLeadfeeder" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftLists#create - /sources/{sourceId}#MicrosoftLists: + x-speakeasy-entity-operation: Source_Leadfeeder#create + /sources/{sourceId}#Leadfeeder: get: tags: - "Sources" @@ -16805,10 +18525,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMicrosoftLists" + operationId: "getSourceLeadfeeder" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftLists#read + x-speakeasy-entity-operation: Source_Leadfeeder#read put: tags: - "Sources" @@ -16816,7 +18536,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftListsPutRequest" + $ref: "#/components/schemas/SourceLeadfeederPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16824,10 +18544,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMicrosoftLists" + operationId: "putSourceLeadfeeder" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftLists#update + x-speakeasy-entity-operation: Source_Leadfeeder#update delete: tags: - "Sources" @@ -16838,10 +18558,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMicrosoftLists" + operationId: "deleteSourceLeadfeeder" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftLists#delete + x-speakeasy-entity-operation: Source_Leadfeeder#delete parameters: - name: "sourceId" schema: @@ -16849,13 +18569,13 @@ paths: type: "string" in: "path" required: true - /sources#MicrosoftOnedrive: + /sources#Lemlist: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftOnedriveCreateRequest" + $ref: "#/components/schemas/SourceLemlistCreateRequest" tags: - "Sources" responses: @@ -16869,14 +18589,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMicrosoftOnedrive" + operationId: "createSourceLemlist" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftOnedrive#create - /sources/{sourceId}#MicrosoftOnedrive: + x-speakeasy-entity-operation: Source_Lemlist#create + /sources/{sourceId}#Lemlist: get: tags: - "Sources" @@ -16891,10 +18611,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMicrosoftOnedrive" + operationId: "getSourceLemlist" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftOnedrive#read + x-speakeasy-entity-operation: Source_Lemlist#read put: tags: - "Sources" @@ -16902,7 +18622,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftOnedrivePutRequest" + $ref: "#/components/schemas/SourceLemlistPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16910,10 +18630,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMicrosoftOnedrive" + operationId: "putSourceLemlist" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftOnedrive#update + x-speakeasy-entity-operation: Source_Lemlist#update delete: tags: - "Sources" @@ -16924,10 +18644,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMicrosoftOnedrive" + operationId: "deleteSourceLemlist" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftOnedrive#delete + x-speakeasy-entity-operation: Source_Lemlist#delete parameters: - name: "sourceId" schema: @@ -16935,13 +18655,13 @@ paths: type: "string" in: "path" required: true - /sources#MicrosoftSharepoint: + /sources#LessAnnoyingCrm: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftSharepointCreateRequest" + $ref: "#/components/schemas/SourceLessAnnoyingCrmCreateRequest" tags: - "Sources" responses: @@ -16955,14 +18675,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMicrosoftSharepoint" + operationId: "createSourceLessAnnoyingCrm" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftSharepoint#create - /sources/{sourceId}#MicrosoftSharepoint: + x-speakeasy-entity-operation: Source_LessAnnoyingCrm#create + /sources/{sourceId}#LessAnnoyingCrm: get: tags: - "Sources" @@ -16977,10 +18697,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMicrosoftSharepoint" + operationId: "getSourceLessAnnoyingCrm" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftSharepoint#read + x-speakeasy-entity-operation: Source_LessAnnoyingCrm#read put: tags: - "Sources" @@ -16988,7 +18708,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftSharepointPutRequest" + $ref: "#/components/schemas/SourceLessAnnoyingCrmPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -16996,10 +18716,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMicrosoftSharepoint" + operationId: "putSourceLessAnnoyingCrm" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftSharepoint#update + x-speakeasy-entity-operation: Source_LessAnnoyingCrm#update delete: tags: - "Sources" @@ -17010,10 +18730,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMicrosoftSharepoint" + operationId: "deleteSourceLessAnnoyingCrm" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftSharepoint#delete + x-speakeasy-entity-operation: Source_LessAnnoyingCrm#delete parameters: - name: "sourceId" schema: @@ -17021,13 +18741,13 @@ paths: type: "string" in: "path" required: true - /sources#MicrosoftTeams: + /sources#LeverHiring: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftTeamsCreateRequest" + $ref: "#/components/schemas/SourceLeverHiringCreateRequest" tags: - "Sources" responses: @@ -17041,14 +18761,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMicrosoftTeams" + operationId: "createSourceLeverHiring" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftTeams#create - /sources/{sourceId}#MicrosoftTeams: + x-speakeasy-entity-operation: Source_LeverHiring#create + /sources/{sourceId}#LeverHiring: get: tags: - "Sources" @@ -17063,10 +18783,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMicrosoftTeams" + operationId: "getSourceLeverHiring" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftTeams#read + x-speakeasy-entity-operation: Source_LeverHiring#read put: tags: - "Sources" @@ -17074,7 +18794,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMicrosoftTeamsPutRequest" + $ref: "#/components/schemas/SourceLeverHiringPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17082,10 +18802,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMicrosoftTeams" + operationId: "putSourceLeverHiring" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftTeams#update + x-speakeasy-entity-operation: Source_LeverHiring#update delete: tags: - "Sources" @@ -17096,10 +18816,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMicrosoftTeams" + operationId: "deleteSourceLeverHiring" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MicrosoftTeams#delete + x-speakeasy-entity-operation: Source_LeverHiring#delete parameters: - name: "sourceId" schema: @@ -17107,13 +18827,13 @@ paths: type: "string" in: "path" required: true - /sources#Miro: + /sources#LightspeedRetail: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMiroCreateRequest" + $ref: "#/components/schemas/SourceLightspeedRetailCreateRequest" tags: - "Sources" responses: @@ -17127,14 +18847,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMiro" + operationId: "createSourceLightspeedRetail" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Miro#create - /sources/{sourceId}#Miro: + x-speakeasy-entity-operation: Source_LightspeedRetail#create + /sources/{sourceId}#LightspeedRetail: get: tags: - "Sources" @@ -17149,10 +18869,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMiro" + operationId: "getSourceLightspeedRetail" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Miro#read + x-speakeasy-entity-operation: Source_LightspeedRetail#read put: tags: - "Sources" @@ -17160,7 +18880,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMiroPutRequest" + $ref: "#/components/schemas/SourceLightspeedRetailPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17168,10 +18888,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMiro" + operationId: "putSourceLightspeedRetail" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Miro#update + x-speakeasy-entity-operation: Source_LightspeedRetail#update delete: tags: - "Sources" @@ -17182,10 +18902,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMiro" + operationId: "deleteSourceLightspeedRetail" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Miro#delete + x-speakeasy-entity-operation: Source_LightspeedRetail#delete parameters: - name: "sourceId" schema: @@ -17193,13 +18913,13 @@ paths: type: "string" in: "path" required: true - /sources#Missive: + /sources#LinkedinAds: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMissiveCreateRequest" + $ref: "#/components/schemas/SourceLinkedinAdsCreateRequest" tags: - "Sources" responses: @@ -17213,14 +18933,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMissive" + operationId: "createSourceLinkedinAds" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Missive#create - /sources/{sourceId}#Missive: + x-speakeasy-entity-operation: Source_LinkedinAds#create + /sources/{sourceId}#LinkedinAds: get: tags: - "Sources" @@ -17235,10 +18955,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMissive" + operationId: "getSourceLinkedinAds" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Missive#read + x-speakeasy-entity-operation: Source_LinkedinAds#read put: tags: - "Sources" @@ -17246,7 +18966,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMissivePutRequest" + $ref: "#/components/schemas/SourceLinkedinAdsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17254,10 +18974,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMissive" + operationId: "putSourceLinkedinAds" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Missive#update + x-speakeasy-entity-operation: Source_LinkedinAds#update delete: tags: - "Sources" @@ -17268,10 +18988,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMissive" + operationId: "deleteSourceLinkedinAds" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Missive#delete + x-speakeasy-entity-operation: Source_LinkedinAds#delete parameters: - name: "sourceId" schema: @@ -17279,13 +18999,13 @@ paths: type: "string" in: "path" required: true - /sources#Mixmax: + /sources#LinkedinPages: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMixmaxCreateRequest" + $ref: "#/components/schemas/SourceLinkedinPagesCreateRequest" tags: - "Sources" responses: @@ -17299,14 +19019,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMixmax" + operationId: "createSourceLinkedinPages" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mixmax#create - /sources/{sourceId}#Mixmax: + x-speakeasy-entity-operation: Source_LinkedinPages#create + /sources/{sourceId}#LinkedinPages: get: tags: - "Sources" @@ -17321,10 +19041,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMixmax" + operationId: "getSourceLinkedinPages" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mixmax#read + x-speakeasy-entity-operation: Source_LinkedinPages#read put: tags: - "Sources" @@ -17332,7 +19052,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMixmaxPutRequest" + $ref: "#/components/schemas/SourceLinkedinPagesPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17340,10 +19060,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMixmax" + operationId: "putSourceLinkedinPages" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mixmax#update + x-speakeasy-entity-operation: Source_LinkedinPages#update delete: tags: - "Sources" @@ -17354,10 +19074,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMixmax" + operationId: "deleteSourceLinkedinPages" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mixmax#delete + x-speakeasy-entity-operation: Source_LinkedinPages#delete parameters: - name: "sourceId" schema: @@ -17365,13 +19085,13 @@ paths: type: "string" in: "path" required: true - /sources#Mixpanel: + /sources#Linnworks: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMixpanelCreateRequest" + $ref: "#/components/schemas/SourceLinnworksCreateRequest" tags: - "Sources" responses: @@ -17385,14 +19105,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMixpanel" + operationId: "createSourceLinnworks" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mixpanel#create - /sources/{sourceId}#Mixpanel: + x-speakeasy-entity-operation: Source_Linnworks#create + /sources/{sourceId}#Linnworks: get: tags: - "Sources" @@ -17407,10 +19127,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMixpanel" + operationId: "getSourceLinnworks" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mixpanel#read + x-speakeasy-entity-operation: Source_Linnworks#read put: tags: - "Sources" @@ -17418,7 +19138,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMixpanelPutRequest" + $ref: "#/components/schemas/SourceLinnworksPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17426,10 +19146,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMixpanel" + operationId: "putSourceLinnworks" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mixpanel#update + x-speakeasy-entity-operation: Source_Linnworks#update delete: tags: - "Sources" @@ -17440,10 +19160,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMixpanel" + operationId: "deleteSourceLinnworks" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mixpanel#delete + x-speakeasy-entity-operation: Source_Linnworks#delete parameters: - name: "sourceId" schema: @@ -17451,13 +19171,13 @@ paths: type: "string" in: "path" required: true - /sources#Mode: + /sources#Lob: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceModeCreateRequest" + $ref: "#/components/schemas/SourceLobCreateRequest" tags: - "Sources" responses: @@ -17471,14 +19191,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMode" + operationId: "createSourceLob" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mode#create - /sources/{sourceId}#Mode: + x-speakeasy-entity-operation: Source_Lob#create + /sources/{sourceId}#Lob: get: tags: - "Sources" @@ -17493,10 +19213,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMode" + operationId: "getSourceLob" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mode#read + x-speakeasy-entity-operation: Source_Lob#read put: tags: - "Sources" @@ -17504,7 +19224,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceModePutRequest" + $ref: "#/components/schemas/SourceLobPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17512,10 +19232,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMode" + operationId: "putSourceLob" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mode#update + x-speakeasy-entity-operation: Source_Lob#update delete: tags: - "Sources" @@ -17526,10 +19246,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMode" + operationId: "deleteSourceLob" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mode#delete + x-speakeasy-entity-operation: Source_Lob#delete parameters: - name: "sourceId" schema: @@ -17537,13 +19257,13 @@ paths: type: "string" in: "path" required: true - /sources#Monday: + /sources#Lokalise: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMondayCreateRequest" + $ref: "#/components/schemas/SourceLokaliseCreateRequest" tags: - "Sources" responses: @@ -17557,14 +19277,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMonday" + operationId: "createSourceLokalise" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Monday#create - /sources/{sourceId}#Monday: + x-speakeasy-entity-operation: Source_Lokalise#create + /sources/{sourceId}#Lokalise: get: tags: - "Sources" @@ -17579,10 +19299,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMonday" + operationId: "getSourceLokalise" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Monday#read + x-speakeasy-entity-operation: Source_Lokalise#read put: tags: - "Sources" @@ -17590,7 +19310,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMondayPutRequest" + $ref: "#/components/schemas/SourceLokalisePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17598,10 +19318,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMonday" + operationId: "putSourceLokalise" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Monday#update + x-speakeasy-entity-operation: Source_Lokalise#update delete: tags: - "Sources" @@ -17612,10 +19332,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMonday" + operationId: "deleteSourceLokalise" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Monday#delete + x-speakeasy-entity-operation: Source_Lokalise#delete parameters: - name: "sourceId" schema: @@ -17623,13 +19343,13 @@ paths: type: "string" in: "path" required: true - /sources#MongodbV2: + /sources#Looker: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMongodbV2CreateRequest" + $ref: "#/components/schemas/SourceLookerCreateRequest" tags: - "Sources" responses: @@ -17643,14 +19363,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMongodbV2" + operationId: "createSourceLooker" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MongodbV2#create - /sources/{sourceId}#MongodbV2: + x-speakeasy-entity-operation: Source_Looker#create + /sources/{sourceId}#Looker: get: tags: - "Sources" @@ -17665,10 +19385,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMongodbV2" + operationId: "getSourceLooker" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MongodbV2#read + x-speakeasy-entity-operation: Source_Looker#read put: tags: - "Sources" @@ -17676,7 +19396,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMongodbV2PutRequest" + $ref: "#/components/schemas/SourceLookerPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17684,10 +19404,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMongodbV2" + operationId: "putSourceLooker" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MongodbV2#update + x-speakeasy-entity-operation: Source_Looker#update delete: tags: - "Sources" @@ -17698,10 +19418,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMongodbV2" + operationId: "deleteSourceLooker" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MongodbV2#delete + x-speakeasy-entity-operation: Source_Looker#delete parameters: - name: "sourceId" schema: @@ -17709,13 +19429,13 @@ paths: type: "string" in: "path" required: true - /sources#Mssql: + /sources#Luma: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMssqlCreateRequest" + $ref: "#/components/schemas/SourceLumaCreateRequest" tags: - "Sources" responses: @@ -17729,14 +19449,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMssql" + operationId: "createSourceLuma" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mssql#create - /sources/{sourceId}#Mssql: + x-speakeasy-entity-operation: Source_Luma#create + /sources/{sourceId}#Luma: get: tags: - "Sources" @@ -17751,10 +19471,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMssql" + operationId: "getSourceLuma" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mssql#read + x-speakeasy-entity-operation: Source_Luma#read put: tags: - "Sources" @@ -17762,7 +19482,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMssqlPutRequest" + $ref: "#/components/schemas/SourceLumaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17770,10 +19490,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMssql" + operationId: "putSourceLuma" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mssql#update + x-speakeasy-entity-operation: Source_Luma#update delete: tags: - "Sources" @@ -17784,10 +19504,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMssql" + operationId: "deleteSourceLuma" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mssql#delete + x-speakeasy-entity-operation: Source_Luma#delete parameters: - name: "sourceId" schema: @@ -17795,13 +19515,13 @@ paths: type: "string" in: "path" required: true - /sources#Mux: + /sources#Mailchimp: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMuxCreateRequest" + $ref: "#/components/schemas/SourceMailchimpCreateRequest" tags: - "Sources" responses: @@ -17815,14 +19535,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMux" + operationId: "createSourceMailchimp" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mux#create - /sources/{sourceId}#Mux: + x-speakeasy-entity-operation: Source_Mailchimp#create + /sources/{sourceId}#Mailchimp: get: tags: - "Sources" @@ -17837,10 +19557,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMux" + operationId: "getSourceMailchimp" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mux#read + x-speakeasy-entity-operation: Source_Mailchimp#read put: tags: - "Sources" @@ -17848,7 +19568,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMuxPutRequest" + $ref: "#/components/schemas/SourceMailchimpPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17856,10 +19576,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMux" + operationId: "putSourceMailchimp" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mux#update + x-speakeasy-entity-operation: Source_Mailchimp#update delete: tags: - "Sources" @@ -17870,10 +19590,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMux" + operationId: "deleteSourceMailchimp" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mux#delete + x-speakeasy-entity-operation: Source_Mailchimp#delete parameters: - name: "sourceId" schema: @@ -17881,13 +19601,13 @@ paths: type: "string" in: "path" required: true - /sources#MyHours: + /sources#Mailerlite: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMyHoursCreateRequest" + $ref: "#/components/schemas/SourceMailerliteCreateRequest" tags: - "Sources" responses: @@ -17901,14 +19621,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMyHours" + operationId: "createSourceMailerlite" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MyHours#create - /sources/{sourceId}#MyHours: + x-speakeasy-entity-operation: Source_Mailerlite#create + /sources/{sourceId}#Mailerlite: get: tags: - "Sources" @@ -17923,10 +19643,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMyHours" + operationId: "getSourceMailerlite" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MyHours#read + x-speakeasy-entity-operation: Source_Mailerlite#read put: tags: - "Sources" @@ -17934,7 +19654,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMyHoursPutRequest" + $ref: "#/components/schemas/SourceMailerlitePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -17942,10 +19662,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMyHours" + operationId: "putSourceMailerlite" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MyHours#update + x-speakeasy-entity-operation: Source_Mailerlite#update delete: tags: - "Sources" @@ -17956,10 +19676,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMyHours" + operationId: "deleteSourceMailerlite" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_MyHours#delete + x-speakeasy-entity-operation: Source_Mailerlite#delete parameters: - name: "sourceId" schema: @@ -17967,13 +19687,13 @@ paths: type: "string" in: "path" required: true - /sources#Mysql: + /sources#Mailgun: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceMysqlCreateRequest" + $ref: "#/components/schemas/SourceMailgunCreateRequest" tags: - "Sources" responses: @@ -17987,14 +19707,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceMysql" + operationId: "createSourceMailgun" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mysql#create - /sources/{sourceId}#Mysql: + x-speakeasy-entity-operation: Source_Mailgun#create + /sources/{sourceId}#Mailgun: get: tags: - "Sources" @@ -18009,10 +19729,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceMysql" + operationId: "getSourceMailgun" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mysql#read + x-speakeasy-entity-operation: Source_Mailgun#read put: tags: - "Sources" @@ -18020,7 +19740,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceMysqlPutRequest" + $ref: "#/components/schemas/SourceMailgunPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18028,10 +19748,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceMysql" + operationId: "putSourceMailgun" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mysql#update + x-speakeasy-entity-operation: Source_Mailgun#update delete: tags: - "Sources" @@ -18042,10 +19762,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceMysql" + operationId: "deleteSourceMailgun" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Mysql#delete + x-speakeasy-entity-operation: Source_Mailgun#delete parameters: - name: "sourceId" schema: @@ -18053,13 +19773,13 @@ paths: type: "string" in: "path" required: true - /sources#N8n: + /sources#MailjetMail: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceN8nCreateRequest" + $ref: "#/components/schemas/SourceMailjetMailCreateRequest" tags: - "Sources" responses: @@ -18073,14 +19793,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceN8n" + operationId: "createSourceMailjetMail" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_N8n#create - /sources/{sourceId}#N8n: + x-speakeasy-entity-operation: Source_MailjetMail#create + /sources/{sourceId}#MailjetMail: get: tags: - "Sources" @@ -18095,10 +19815,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceN8n" + operationId: "getSourceMailjetMail" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_N8n#read + x-speakeasy-entity-operation: Source_MailjetMail#read put: tags: - "Sources" @@ -18106,7 +19826,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceN8nPutRequest" + $ref: "#/components/schemas/SourceMailjetMailPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18114,10 +19834,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceN8n" + operationId: "putSourceMailjetMail" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_N8n#update + x-speakeasy-entity-operation: Source_MailjetMail#update delete: tags: - "Sources" @@ -18128,10 +19848,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceN8n" + operationId: "deleteSourceMailjetMail" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_N8n#delete + x-speakeasy-entity-operation: Source_MailjetMail#delete parameters: - name: "sourceId" schema: @@ -18139,13 +19859,13 @@ paths: type: "string" in: "path" required: true - /sources#Nasa: + /sources#MailjetSms: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceNasaCreateRequest" + $ref: "#/components/schemas/SourceMailjetSmsCreateRequest" tags: - "Sources" responses: @@ -18159,14 +19879,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceNasa" + operationId: "createSourceMailjetSms" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nasa#create - /sources/{sourceId}#Nasa: + x-speakeasy-entity-operation: Source_MailjetSms#create + /sources/{sourceId}#MailjetSms: get: tags: - "Sources" @@ -18181,10 +19901,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceNasa" + operationId: "getSourceMailjetSms" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nasa#read + x-speakeasy-entity-operation: Source_MailjetSms#read put: tags: - "Sources" @@ -18192,7 +19912,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceNasaPutRequest" + $ref: "#/components/schemas/SourceMailjetSmsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18200,10 +19920,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceNasa" + operationId: "putSourceMailjetSms" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nasa#update + x-speakeasy-entity-operation: Source_MailjetSms#update delete: tags: - "Sources" @@ -18214,10 +19934,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceNasa" + operationId: "deleteSourceMailjetSms" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nasa#delete + x-speakeasy-entity-operation: Source_MailjetSms#delete parameters: - name: "sourceId" schema: @@ -18225,13 +19945,13 @@ paths: type: "string" in: "path" required: true - /sources#Netsuite: + /sources#Mailosaur: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceNetsuiteCreateRequest" + $ref: "#/components/schemas/SourceMailosaurCreateRequest" tags: - "Sources" responses: @@ -18245,14 +19965,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceNetsuite" + operationId: "createSourceMailosaur" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Netsuite#create - /sources/{sourceId}#Netsuite: + x-speakeasy-entity-operation: Source_Mailosaur#create + /sources/{sourceId}#Mailosaur: get: tags: - "Sources" @@ -18267,10 +19987,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceNetsuite" + operationId: "getSourceMailosaur" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Netsuite#read + x-speakeasy-entity-operation: Source_Mailosaur#read put: tags: - "Sources" @@ -18278,7 +19998,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceNetsuitePutRequest" + $ref: "#/components/schemas/SourceMailosaurPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18286,10 +20006,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceNetsuite" + operationId: "putSourceMailosaur" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Netsuite#update + x-speakeasy-entity-operation: Source_Mailosaur#update delete: tags: - "Sources" @@ -18300,10 +20020,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceNetsuite" + operationId: "deleteSourceMailosaur" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Netsuite#delete + x-speakeasy-entity-operation: Source_Mailosaur#delete parameters: - name: "sourceId" schema: @@ -18311,13 +20031,13 @@ paths: type: "string" in: "path" required: true - /sources#NewsApi: + /sources#Mailtrap: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceNewsApiCreateRequest" + $ref: "#/components/schemas/SourceMailtrapCreateRequest" tags: - "Sources" responses: @@ -18331,14 +20051,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceNewsApi" + operationId: "createSourceMailtrap" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_NewsApi#create - /sources/{sourceId}#NewsApi: + x-speakeasy-entity-operation: Source_Mailtrap#create + /sources/{sourceId}#Mailtrap: get: tags: - "Sources" @@ -18353,10 +20073,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceNewsApi" + operationId: "getSourceMailtrap" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_NewsApi#read + x-speakeasy-entity-operation: Source_Mailtrap#read put: tags: - "Sources" @@ -18364,7 +20084,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceNewsApiPutRequest" + $ref: "#/components/schemas/SourceMailtrapPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18372,10 +20092,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceNewsApi" + operationId: "putSourceMailtrap" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_NewsApi#update + x-speakeasy-entity-operation: Source_Mailtrap#update delete: tags: - "Sources" @@ -18386,10 +20106,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceNewsApi" + operationId: "deleteSourceMailtrap" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_NewsApi#delete + x-speakeasy-entity-operation: Source_Mailtrap#delete parameters: - name: "sourceId" schema: @@ -18397,13 +20117,13 @@ paths: type: "string" in: "path" required: true - /sources#NorthpassLms: + /sources#Marketo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceNorthpassLmsCreateRequest" + $ref: "#/components/schemas/SourceMarketoCreateRequest" tags: - "Sources" responses: @@ -18417,14 +20137,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceNorthpassLms" + operationId: "createSourceMarketo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_NorthpassLms#create - /sources/{sourceId}#NorthpassLms: + x-speakeasy-entity-operation: Source_Marketo#create + /sources/{sourceId}#Marketo: get: tags: - "Sources" @@ -18439,10 +20159,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceNorthpassLms" + operationId: "getSourceMarketo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_NorthpassLms#read + x-speakeasy-entity-operation: Source_Marketo#read put: tags: - "Sources" @@ -18450,7 +20170,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceNorthpassLmsPutRequest" + $ref: "#/components/schemas/SourceMarketoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18458,10 +20178,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceNorthpassLms" + operationId: "putSourceMarketo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_NorthpassLms#update + x-speakeasy-entity-operation: Source_Marketo#update delete: tags: - "Sources" @@ -18472,10 +20192,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceNorthpassLms" + operationId: "deleteSourceMarketo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_NorthpassLms#delete + x-speakeasy-entity-operation: Source_Marketo#delete parameters: - name: "sourceId" schema: @@ -18483,13 +20203,13 @@ paths: type: "string" in: "path" required: true - /sources#Notion: + /sources#Marketstack: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceNotionCreateRequest" + $ref: "#/components/schemas/SourceMarketstackCreateRequest" tags: - "Sources" responses: @@ -18503,14 +20223,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceNotion" + operationId: "createSourceMarketstack" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Notion#create - /sources/{sourceId}#Notion: + x-speakeasy-entity-operation: Source_Marketstack#create + /sources/{sourceId}#Marketstack: get: tags: - "Sources" @@ -18525,10 +20245,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceNotion" + operationId: "getSourceMarketstack" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Notion#read + x-speakeasy-entity-operation: Source_Marketstack#read put: tags: - "Sources" @@ -18536,7 +20256,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceNotionPutRequest" + $ref: "#/components/schemas/SourceMarketstackPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18544,10 +20264,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceNotion" + operationId: "putSourceMarketstack" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Notion#update + x-speakeasy-entity-operation: Source_Marketstack#update delete: tags: - "Sources" @@ -18558,10 +20278,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceNotion" + operationId: "deleteSourceMarketstack" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Notion#delete + x-speakeasy-entity-operation: Source_Marketstack#delete parameters: - name: "sourceId" schema: @@ -18569,13 +20289,13 @@ paths: type: "string" in: "path" required: true - /sources#Nylas: + /sources#Mention: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceNylasCreateRequest" + $ref: "#/components/schemas/SourceMentionCreateRequest" tags: - "Sources" responses: @@ -18589,14 +20309,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceNylas" + operationId: "createSourceMention" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nylas#create - /sources/{sourceId}#Nylas: + x-speakeasy-entity-operation: Source_Mention#create + /sources/{sourceId}#Mention: get: tags: - "Sources" @@ -18611,10 +20331,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceNylas" + operationId: "getSourceMention" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nylas#read + x-speakeasy-entity-operation: Source_Mention#read put: tags: - "Sources" @@ -18622,7 +20342,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceNylasPutRequest" + $ref: "#/components/schemas/SourceMentionPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18630,10 +20350,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceNylas" + operationId: "putSourceMention" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nylas#update + x-speakeasy-entity-operation: Source_Mention#update delete: tags: - "Sources" @@ -18644,10 +20364,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceNylas" + operationId: "deleteSourceMention" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nylas#delete + x-speakeasy-entity-operation: Source_Mention#delete parameters: - name: "sourceId" schema: @@ -18655,13 +20375,13 @@ paths: type: "string" in: "path" required: true - /sources#Nytimes: + /sources#Metabase: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceNytimesCreateRequest" + $ref: "#/components/schemas/SourceMetabaseCreateRequest" tags: - "Sources" responses: @@ -18675,14 +20395,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceNytimes" + operationId: "createSourceMetabase" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nytimes#create - /sources/{sourceId}#Nytimes: + x-speakeasy-entity-operation: Source_Metabase#create + /sources/{sourceId}#Metabase: get: tags: - "Sources" @@ -18697,10 +20417,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceNytimes" + operationId: "getSourceMetabase" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nytimes#read + x-speakeasy-entity-operation: Source_Metabase#read put: tags: - "Sources" @@ -18708,7 +20428,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceNytimesPutRequest" + $ref: "#/components/schemas/SourceMetabasePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18716,10 +20436,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceNytimes" + operationId: "putSourceMetabase" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nytimes#update + x-speakeasy-entity-operation: Source_Metabase#update delete: tags: - "Sources" @@ -18730,10 +20450,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceNytimes" + operationId: "deleteSourceMetabase" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Nytimes#delete + x-speakeasy-entity-operation: Source_Metabase#delete parameters: - name: "sourceId" schema: @@ -18741,13 +20461,13 @@ paths: type: "string" in: "path" required: true - /sources#Okta: + /sources#MicrosoftDataverse: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOktaCreateRequest" + $ref: "#/components/schemas/SourceMicrosoftDataverseCreateRequest" tags: - "Sources" responses: @@ -18761,14 +20481,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOkta" + operationId: "createSourceMicrosoftDataverse" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Okta#create - /sources/{sourceId}#Okta: + x-speakeasy-entity-operation: Source_MicrosoftDataverse#create + /sources/{sourceId}#MicrosoftDataverse: get: tags: - "Sources" @@ -18783,10 +20503,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOkta" + operationId: "getSourceMicrosoftDataverse" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Okta#read + x-speakeasy-entity-operation: Source_MicrosoftDataverse#read put: tags: - "Sources" @@ -18794,7 +20514,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOktaPutRequest" + $ref: "#/components/schemas/SourceMicrosoftDataversePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18802,10 +20522,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOkta" + operationId: "putSourceMicrosoftDataverse" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Okta#update + x-speakeasy-entity-operation: Source_MicrosoftDataverse#update delete: tags: - "Sources" @@ -18816,10 +20536,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOkta" + operationId: "deleteSourceMicrosoftDataverse" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Okta#delete + x-speakeasy-entity-operation: Source_MicrosoftDataverse#delete parameters: - name: "sourceId" schema: @@ -18827,13 +20547,13 @@ paths: type: "string" in: "path" required: true - /sources#Omnisend: + /sources#MicrosoftEntraId: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOmnisendCreateRequest" + $ref: "#/components/schemas/SourceMicrosoftEntraIdCreateRequest" tags: - "Sources" responses: @@ -18847,14 +20567,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOmnisend" + operationId: "createSourceMicrosoftEntraId" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Omnisend#create - /sources/{sourceId}#Omnisend: + x-speakeasy-entity-operation: Source_MicrosoftEntraId#create + /sources/{sourceId}#MicrosoftEntraId: get: tags: - "Sources" @@ -18869,10 +20589,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOmnisend" + operationId: "getSourceMicrosoftEntraId" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Omnisend#read + x-speakeasy-entity-operation: Source_MicrosoftEntraId#read put: tags: - "Sources" @@ -18880,7 +20600,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOmnisendPutRequest" + $ref: "#/components/schemas/SourceMicrosoftEntraIdPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18888,10 +20608,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOmnisend" + operationId: "putSourceMicrosoftEntraId" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Omnisend#update + x-speakeasy-entity-operation: Source_MicrosoftEntraId#update delete: tags: - "Sources" @@ -18902,10 +20622,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOmnisend" + operationId: "deleteSourceMicrosoftEntraId" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Omnisend#delete + x-speakeasy-entity-operation: Source_MicrosoftEntraId#delete parameters: - name: "sourceId" schema: @@ -18913,13 +20633,13 @@ paths: type: "string" in: "path" required: true - /sources#Onesignal: + /sources#MicrosoftLists: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOnesignalCreateRequest" + $ref: "#/components/schemas/SourceMicrosoftListsCreateRequest" tags: - "Sources" responses: @@ -18933,14 +20653,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOnesignal" + operationId: "createSourceMicrosoftLists" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Onesignal#create - /sources/{sourceId}#Onesignal: + x-speakeasy-entity-operation: Source_MicrosoftLists#create + /sources/{sourceId}#MicrosoftLists: get: tags: - "Sources" @@ -18955,10 +20675,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOnesignal" + operationId: "getSourceMicrosoftLists" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Onesignal#read + x-speakeasy-entity-operation: Source_MicrosoftLists#read put: tags: - "Sources" @@ -18966,7 +20686,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOnesignalPutRequest" + $ref: "#/components/schemas/SourceMicrosoftListsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -18974,10 +20694,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOnesignal" + operationId: "putSourceMicrosoftLists" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Onesignal#update + x-speakeasy-entity-operation: Source_MicrosoftLists#update delete: tags: - "Sources" @@ -18988,10 +20708,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOnesignal" + operationId: "deleteSourceMicrosoftLists" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Onesignal#delete + x-speakeasy-entity-operation: Source_MicrosoftLists#delete parameters: - name: "sourceId" schema: @@ -18999,13 +20719,13 @@ paths: type: "string" in: "path" required: true - /sources#OpenDataDc: + /sources#MicrosoftOnedrive: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOpenDataDcCreateRequest" + $ref: "#/components/schemas/SourceMicrosoftOnedriveCreateRequest" tags: - "Sources" responses: @@ -19019,14 +20739,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOpenDataDc" + operationId: "createSourceMicrosoftOnedrive" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_OpenDataDc#create - /sources/{sourceId}#OpenDataDc: + x-speakeasy-entity-operation: Source_MicrosoftOnedrive#create + /sources/{sourceId}#MicrosoftOnedrive: get: tags: - "Sources" @@ -19041,10 +20761,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOpenDataDc" + operationId: "getSourceMicrosoftOnedrive" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_OpenDataDc#read + x-speakeasy-entity-operation: Source_MicrosoftOnedrive#read put: tags: - "Sources" @@ -19052,7 +20772,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOpenDataDcPutRequest" + $ref: "#/components/schemas/SourceMicrosoftOnedrivePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19060,10 +20780,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOpenDataDc" + operationId: "putSourceMicrosoftOnedrive" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_OpenDataDc#update + x-speakeasy-entity-operation: Source_MicrosoftOnedrive#update delete: tags: - "Sources" @@ -19074,10 +20794,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOpenDataDc" + operationId: "deleteSourceMicrosoftOnedrive" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_OpenDataDc#delete + x-speakeasy-entity-operation: Source_MicrosoftOnedrive#delete parameters: - name: "sourceId" schema: @@ -19085,13 +20805,13 @@ paths: type: "string" in: "path" required: true - /sources#Openweather: + /sources#MicrosoftSharepoint: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOpenweatherCreateRequest" + $ref: "#/components/schemas/SourceMicrosoftSharepointCreateRequest" tags: - "Sources" responses: @@ -19105,14 +20825,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOpenweather" + operationId: "createSourceMicrosoftSharepoint" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Openweather#create - /sources/{sourceId}#Openweather: + x-speakeasy-entity-operation: Source_MicrosoftSharepoint#create + /sources/{sourceId}#MicrosoftSharepoint: get: tags: - "Sources" @@ -19127,10 +20847,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOpenweather" + operationId: "getSourceMicrosoftSharepoint" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Openweather#read + x-speakeasy-entity-operation: Source_MicrosoftSharepoint#read put: tags: - "Sources" @@ -19138,7 +20858,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOpenweatherPutRequest" + $ref: "#/components/schemas/SourceMicrosoftSharepointPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19146,10 +20866,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOpenweather" + operationId: "putSourceMicrosoftSharepoint" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Openweather#update + x-speakeasy-entity-operation: Source_MicrosoftSharepoint#update delete: tags: - "Sources" @@ -19160,10 +20880,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOpenweather" + operationId: "deleteSourceMicrosoftSharepoint" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Openweather#delete + x-speakeasy-entity-operation: Source_MicrosoftSharepoint#delete parameters: - name: "sourceId" schema: @@ -19171,13 +20891,13 @@ paths: type: "string" in: "path" required: true - /sources#Opsgenie: + /sources#MicrosoftTeams: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOpsgenieCreateRequest" + $ref: "#/components/schemas/SourceMicrosoftTeamsCreateRequest" tags: - "Sources" responses: @@ -19191,14 +20911,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOpsgenie" + operationId: "createSourceMicrosoftTeams" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Opsgenie#create - /sources/{sourceId}#Opsgenie: + x-speakeasy-entity-operation: Source_MicrosoftTeams#create + /sources/{sourceId}#MicrosoftTeams: get: tags: - "Sources" @@ -19213,10 +20933,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOpsgenie" + operationId: "getSourceMicrosoftTeams" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Opsgenie#read + x-speakeasy-entity-operation: Source_MicrosoftTeams#read put: tags: - "Sources" @@ -19224,7 +20944,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOpsgeniePutRequest" + $ref: "#/components/schemas/SourceMicrosoftTeamsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19232,10 +20952,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOpsgenie" + operationId: "putSourceMicrosoftTeams" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Opsgenie#update + x-speakeasy-entity-operation: Source_MicrosoftTeams#update delete: tags: - "Sources" @@ -19246,10 +20966,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOpsgenie" + operationId: "deleteSourceMicrosoftTeams" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Opsgenie#delete + x-speakeasy-entity-operation: Source_MicrosoftTeams#delete parameters: - name: "sourceId" schema: @@ -19257,13 +20977,13 @@ paths: type: "string" in: "path" required: true - /sources#Oracle: + /sources#Miro: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOracleCreateRequest" + $ref: "#/components/schemas/SourceMiroCreateRequest" tags: - "Sources" responses: @@ -19277,14 +20997,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOracle" + operationId: "createSourceMiro" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Oracle#create - /sources/{sourceId}#Oracle: + x-speakeasy-entity-operation: Source_Miro#create + /sources/{sourceId}#Miro: get: tags: - "Sources" @@ -19299,10 +21019,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOracle" + operationId: "getSourceMiro" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Oracle#read + x-speakeasy-entity-operation: Source_Miro#read put: tags: - "Sources" @@ -19310,7 +21030,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOraclePutRequest" + $ref: "#/components/schemas/SourceMiroPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19318,10 +21038,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOracle" + operationId: "putSourceMiro" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Oracle#update + x-speakeasy-entity-operation: Source_Miro#update delete: tags: - "Sources" @@ -19332,10 +21052,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOracle" + operationId: "deleteSourceMiro" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Oracle#delete + x-speakeasy-entity-operation: Source_Miro#delete parameters: - name: "sourceId" schema: @@ -19343,13 +21063,13 @@ paths: type: "string" in: "path" required: true - /sources#Orb: + /sources#Missive: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOrbCreateRequest" + $ref: "#/components/schemas/SourceMissiveCreateRequest" tags: - "Sources" responses: @@ -19363,14 +21083,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOrb" + operationId: "createSourceMissive" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Orb#create - /sources/{sourceId}#Orb: + x-speakeasy-entity-operation: Source_Missive#create + /sources/{sourceId}#Missive: get: tags: - "Sources" @@ -19385,10 +21105,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOrb" + operationId: "getSourceMissive" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Orb#read + x-speakeasy-entity-operation: Source_Missive#read put: tags: - "Sources" @@ -19396,7 +21116,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOrbPutRequest" + $ref: "#/components/schemas/SourceMissivePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19404,10 +21124,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOrb" + operationId: "putSourceMissive" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Orb#update + x-speakeasy-entity-operation: Source_Missive#update delete: tags: - "Sources" @@ -19418,10 +21138,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOrb" + operationId: "deleteSourceMissive" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Orb#delete + x-speakeasy-entity-operation: Source_Missive#delete parameters: - name: "sourceId" schema: @@ -19429,13 +21149,13 @@ paths: type: "string" in: "path" required: true - /sources#Orbit: + /sources#Mixmax: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOrbitCreateRequest" + $ref: "#/components/schemas/SourceMixmaxCreateRequest" tags: - "Sources" responses: @@ -19449,14 +21169,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOrbit" + operationId: "createSourceMixmax" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Orbit#create - /sources/{sourceId}#Orbit: + x-speakeasy-entity-operation: Source_Mixmax#create + /sources/{sourceId}#Mixmax: get: tags: - "Sources" @@ -19471,10 +21191,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOrbit" + operationId: "getSourceMixmax" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Orbit#read + x-speakeasy-entity-operation: Source_Mixmax#read put: tags: - "Sources" @@ -19482,7 +21202,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOrbitPutRequest" + $ref: "#/components/schemas/SourceMixmaxPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19490,10 +21210,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOrbit" + operationId: "putSourceMixmax" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Orbit#update + x-speakeasy-entity-operation: Source_Mixmax#update delete: tags: - "Sources" @@ -19504,10 +21224,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOrbit" + operationId: "deleteSourceMixmax" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Orbit#delete + x-speakeasy-entity-operation: Source_Mixmax#delete parameters: - name: "sourceId" schema: @@ -19515,13 +21235,13 @@ paths: type: "string" in: "path" required: true - /sources#Oura: + /sources#Mixpanel: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOuraCreateRequest" + $ref: "#/components/schemas/SourceMixpanelCreateRequest" tags: - "Sources" responses: @@ -19535,14 +21255,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOura" + operationId: "createSourceMixpanel" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Oura#create - /sources/{sourceId}#Oura: + x-speakeasy-entity-operation: Source_Mixpanel#create + /sources/{sourceId}#Mixpanel: get: tags: - "Sources" @@ -19557,10 +21277,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOura" + operationId: "getSourceMixpanel" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Oura#read + x-speakeasy-entity-operation: Source_Mixpanel#read put: tags: - "Sources" @@ -19568,7 +21288,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOuraPutRequest" + $ref: "#/components/schemas/SourceMixpanelPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19576,10 +21296,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOura" + operationId: "putSourceMixpanel" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Oura#update + x-speakeasy-entity-operation: Source_Mixpanel#update delete: tags: - "Sources" @@ -19590,10 +21310,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOura" + operationId: "deleteSourceMixpanel" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Oura#delete + x-speakeasy-entity-operation: Source_Mixpanel#delete parameters: - name: "sourceId" schema: @@ -19601,13 +21321,2937 @@ paths: type: "string" in: "path" required: true - /sources#OutbrainAmplify: + /sources#Mode: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOutbrainAmplifyCreateRequest" + $ref: "#/components/schemas/SourceModeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMode" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mode#create + /sources/{sourceId}#Mode: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMode" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mode#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceModePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMode" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mode#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMode" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mode#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Monday: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMondayCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMonday" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Monday#create + /sources/{sourceId}#Monday: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMonday" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Monday#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMondayPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMonday" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Monday#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMonday" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Monday#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#MongodbV2: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMongodbV2CreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMongodbV2" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MongodbV2#create + /sources/{sourceId}#MongodbV2: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMongodbV2" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MongodbV2#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMongodbV2PutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMongodbV2" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MongodbV2#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMongodbV2" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MongodbV2#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Mssql: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMssqlCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMssql" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mssql#create + /sources/{sourceId}#Mssql: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMssql" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mssql#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMssqlPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMssql" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mssql#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMssql" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mssql#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Mux: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMuxCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMux" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mux#create + /sources/{sourceId}#Mux: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMux" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mux#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMuxPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMux" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mux#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMux" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mux#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#MyHours: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMyHoursCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMyHours" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MyHours#create + /sources/{sourceId}#MyHours: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMyHours" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MyHours#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMyHoursPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMyHours" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MyHours#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMyHours" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MyHours#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Mysql: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMysqlCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMysql" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mysql#create + /sources/{sourceId}#Mysql: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMysql" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mysql#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMysqlPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMysql" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mysql#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMysql" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mysql#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#N8n: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceN8nCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceN8n" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_N8n#create + /sources/{sourceId}#N8n: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceN8n" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_N8n#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceN8nPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceN8n" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_N8n#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceN8n" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_N8n#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Nasa: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNasaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNasa" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nasa#create + /sources/{sourceId}#Nasa: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNasa" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nasa#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNasaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNasa" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nasa#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNasa" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nasa#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Netsuite: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNetsuiteCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNetsuite" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Netsuite#create + /sources/{sourceId}#Netsuite: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNetsuite" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Netsuite#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNetsuitePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNetsuite" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Netsuite#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNetsuite" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Netsuite#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#NewsApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNewsApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNewsApi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NewsApi#create + /sources/{sourceId}#NewsApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNewsApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NewsApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNewsApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNewsApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NewsApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNewsApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NewsApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#NewsdataIo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNewsdataIoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNewsdataIo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NewsdataIo#create + /sources/{sourceId}#NewsdataIo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNewsdataIo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NewsdataIo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNewsdataIoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNewsdataIo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NewsdataIo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNewsdataIo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NewsdataIo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Nocrm: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNocrmCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNocrm" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nocrm#create + /sources/{sourceId}#Nocrm: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNocrm" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nocrm#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNocrmPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNocrm" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nocrm#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNocrm" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nocrm#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#NorthpassLms: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNorthpassLmsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNorthpassLms" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NorthpassLms#create + /sources/{sourceId}#NorthpassLms: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNorthpassLms" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NorthpassLms#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNorthpassLmsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNorthpassLms" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NorthpassLms#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNorthpassLms" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NorthpassLms#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Notion: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNotionCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNotion" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Notion#create + /sources/{sourceId}#Notion: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNotion" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Notion#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNotionPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNotion" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Notion#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNotion" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Notion#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Nutshell: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNutshellCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNutshell" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nutshell#create + /sources/{sourceId}#Nutshell: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNutshell" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nutshell#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNutshellPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNutshell" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nutshell#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNutshell" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nutshell#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Nylas: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNylasCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNylas" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nylas#create + /sources/{sourceId}#Nylas: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNylas" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nylas#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNylasPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNylas" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nylas#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNylas" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nylas#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Nytimes: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNytimesCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNytimes" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nytimes#create + /sources/{sourceId}#Nytimes: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNytimes" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nytimes#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNytimesPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNytimes" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nytimes#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNytimes" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nytimes#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Okta: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOktaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOkta" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Okta#create + /sources/{sourceId}#Okta: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOkta" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Okta#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOktaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOkta" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Okta#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOkta" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Okta#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Omnisend: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOmnisendCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOmnisend" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Omnisend#create + /sources/{sourceId}#Omnisend: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOmnisend" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Omnisend#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOmnisendPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOmnisend" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Omnisend#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOmnisend" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Omnisend#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Oncehub: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOncehubCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOncehub" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oncehub#create + /sources/{sourceId}#Oncehub: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOncehub" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oncehub#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOncehubPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOncehub" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oncehub#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOncehub" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oncehub#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Onepagecrm: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOnepagecrmCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOnepagecrm" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onepagecrm#create + /sources/{sourceId}#Onepagecrm: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOnepagecrm" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onepagecrm#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOnepagecrmPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOnepagecrm" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onepagecrm#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOnepagecrm" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onepagecrm#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Onesignal: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOnesignalCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOnesignal" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onesignal#create + /sources/{sourceId}#Onesignal: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOnesignal" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onesignal#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOnesignalPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOnesignal" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onesignal#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOnesignal" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onesignal#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Onfleet: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOnfleetCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOnfleet" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onfleet#create + /sources/{sourceId}#Onfleet: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOnfleet" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onfleet#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOnfleetPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOnfleet" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onfleet#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOnfleet" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onfleet#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#OpenDataDc: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpenDataDcCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOpenDataDc" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OpenDataDc#create + /sources/{sourceId}#OpenDataDc: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOpenDataDc" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OpenDataDc#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpenDataDcPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOpenDataDc" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OpenDataDc#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOpenDataDc" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OpenDataDc#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Openaq: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpenaqCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOpenaq" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openaq#create + /sources/{sourceId}#Openaq: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOpenaq" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openaq#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpenaqPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOpenaq" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openaq#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOpenaq" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openaq#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Openfda: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpenfdaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOpenfda" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openfda#create + /sources/{sourceId}#Openfda: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOpenfda" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openfda#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpenfdaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOpenfda" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openfda#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOpenfda" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openfda#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Openweather: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpenweatherCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOpenweather" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openweather#create + /sources/{sourceId}#Openweather: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOpenweather" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openweather#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpenweatherPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOpenweather" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openweather#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOpenweather" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Openweather#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#OpinionStage: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpinionStageCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOpinionStage" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OpinionStage#create + /sources/{sourceId}#OpinionStage: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOpinionStage" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OpinionStage#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpinionStagePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOpinionStage" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OpinionStage#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOpinionStage" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OpinionStage#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Opsgenie: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpsgenieCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOpsgenie" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Opsgenie#create + /sources/{sourceId}#Opsgenie: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOpsgenie" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Opsgenie#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOpsgeniePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOpsgenie" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Opsgenie#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOpsgenie" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Opsgenie#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Oracle: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOracleCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOracle" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oracle#create + /sources/{sourceId}#Oracle: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOracle" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oracle#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOraclePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOracle" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oracle#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOracle" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oracle#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Orb: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOrbCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOrb" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orb#create + /sources/{sourceId}#Orb: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOrb" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orb#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOrbPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOrb" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orb#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOrb" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orb#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Orbit: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOrbitCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOrbit" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orbit#create + /sources/{sourceId}#Orbit: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOrbit" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orbit#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOrbitPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOrbit" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orbit#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOrbit" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orbit#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Oura: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOuraCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOura" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oura#create + /sources/{sourceId}#Oura: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOura" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oura#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOuraPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOura" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oura#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOura" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oura#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#OutbrainAmplify: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOutbrainAmplifyCreateRequest" tags: - "Sources" responses: @@ -19627,8 +24271,2674 @@ paths: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_OutbrainAmplify#create - /sources/{sourceId}#OutbrainAmplify: + x-speakeasy-entity-operation: Source_OutbrainAmplify#create + /sources/{sourceId}#OutbrainAmplify: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOutbrainAmplify" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OutbrainAmplify#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOutbrainAmplifyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOutbrainAmplify" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OutbrainAmplify#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOutbrainAmplify" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OutbrainAmplify#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Outreach: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOutreachCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOutreach" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Outreach#create + /sources/{sourceId}#Outreach: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOutreach" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Outreach#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOutreachPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOutreach" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Outreach#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOutreach" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Outreach#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Oveit: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOveitCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOveit" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oveit#create + /sources/{sourceId}#Oveit: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOveit" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oveit#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOveitPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOveit" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oveit#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOveit" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oveit#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#PabblySubscriptionsBilling: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePabblySubscriptionsBillingCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePabblySubscriptionsBilling" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PabblySubscriptionsBilling#create + /sources/{sourceId}#PabblySubscriptionsBilling: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePabblySubscriptionsBilling" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PabblySubscriptionsBilling#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePabblySubscriptionsBillingPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePabblySubscriptionsBilling" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PabblySubscriptionsBilling#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePabblySubscriptionsBilling" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PabblySubscriptionsBilling#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pandadoc: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePandadocCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePandadoc" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pandadoc#create + /sources/{sourceId}#Pandadoc: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePandadoc" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pandadoc#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePandadocPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePandadoc" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pandadoc#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePandadoc" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pandadoc#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Paperform: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaperformCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePaperform" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paperform#create + /sources/{sourceId}#Paperform: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePaperform" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paperform#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaperformPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePaperform" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paperform#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePaperform" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paperform#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Papersign: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePapersignCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePapersign" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Papersign#create + /sources/{sourceId}#Papersign: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePapersign" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Papersign#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePapersignPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePapersign" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Papersign#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePapersign" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Papersign#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pardot: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePardotCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePardot" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pardot#create + /sources/{sourceId}#Pardot: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePardot" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pardot#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePardotPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePardot" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pardot#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePardot" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pardot#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#PaypalTransaction: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaypalTransactionCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePaypalTransaction" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PaypalTransaction#create + /sources/{sourceId}#PaypalTransaction: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePaypalTransaction" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PaypalTransaction#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaypalTransactionPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePaypalTransaction" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PaypalTransaction#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePaypalTransaction" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PaypalTransaction#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Paystack: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaystackCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePaystack" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paystack#create + /sources/{sourceId}#Paystack: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePaystack" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paystack#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaystackPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePaystack" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paystack#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePaystack" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paystack#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pendo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePendoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePendo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pendo#create + /sources/{sourceId}#Pendo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePendo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pendo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePendoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePendo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pendo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePendo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pendo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pennylane: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePennylaneCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePennylane" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pennylane#create + /sources/{sourceId}#Pennylane: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePennylane" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pennylane#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePennylanePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePennylane" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pennylane#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePennylane" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pennylane#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Persistiq: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePersistiqCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePersistiq" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persistiq#create + /sources/{sourceId}#Persistiq: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePersistiq" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persistiq#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePersistiqPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePersistiq" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persistiq#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePersistiq" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persistiq#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Persona: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePersonaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePersona" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persona#create + /sources/{sourceId}#Persona: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePersona" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persona#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePersonaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePersona" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persona#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePersona" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persona#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#PexelsApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePexelsApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePexelsApi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PexelsApi#create + /sources/{sourceId}#PexelsApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePexelsApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PexelsApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePexelsApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePexelsApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PexelsApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePexelsApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PexelsApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Picqer: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePicqerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePicqer" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Picqer#create + /sources/{sourceId}#Picqer: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePicqer" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Picqer#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePicqerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePicqer" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Picqer#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePicqer" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Picqer#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pinterest: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePinterestCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePinterest" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pinterest#create + /sources/{sourceId}#Pinterest: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePinterest" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pinterest#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePinterestPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePinterest" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pinterest#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePinterest" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pinterest#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pipedrive: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePipedriveCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePipedrive" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipedrive#create + /sources/{sourceId}#Pipedrive: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePipedrive" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipedrive#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePipedrivePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePipedrive" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipedrive#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePipedrive" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipedrive#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pipeliner: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePipelinerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePipeliner" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipeliner#create + /sources/{sourceId}#Pipeliner: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePipeliner" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipeliner#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePipelinerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePipeliner" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipeliner#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePipeliner" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipeliner#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#PivotalTracker: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePivotalTrackerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePivotalTracker" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PivotalTracker#create + /sources/{sourceId}#PivotalTracker: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePivotalTracker" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PivotalTracker#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePivotalTrackerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePivotalTracker" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PivotalTracker#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePivotalTracker" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PivotalTracker#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Piwik: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePiwikCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePiwik" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Piwik#create + /sources/{sourceId}#Piwik: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePiwik" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Piwik#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePiwikPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePiwik" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Piwik#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePiwik" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Piwik#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Plaid: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePlaidCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePlaid" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Plaid#create + /sources/{sourceId}#Plaid: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePlaid" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Plaid#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePlaidPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePlaid" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Plaid#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePlaid" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Plaid#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Planhat: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePlanhatCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePlanhat" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Planhat#create + /sources/{sourceId}#Planhat: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePlanhat" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Planhat#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePlanhatPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePlanhat" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Planhat#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePlanhat" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Planhat#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Plausible: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePlausibleCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePlausible" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Plausible#create + /sources/{sourceId}#Plausible: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePlausible" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Plausible#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePlausiblePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePlausible" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Plausible#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePlausible" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Plausible#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pocket: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePocketCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePocket" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pocket#create + /sources/{sourceId}#Pocket: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePocket" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pocket#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePocketPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePocket" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pocket#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePocket" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pocket#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pokeapi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePokeapiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePokeapi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pokeapi#create + /sources/{sourceId}#Pokeapi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePokeapi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pokeapi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePokeapiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePokeapi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pokeapi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePokeapi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pokeapi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#PolygonStockApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePolygonStockApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePolygonStockApi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PolygonStockApi#create + /sources/{sourceId}#PolygonStockApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePolygonStockApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PolygonStockApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePolygonStockApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePolygonStockApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PolygonStockApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePolygonStockApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PolygonStockApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Postgres: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePostgresCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePostgres" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postgres#create + /sources/{sourceId}#Postgres: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePostgres" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postgres#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePostgresPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePostgres" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postgres#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePostgres" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postgres#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Posthog: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePosthogCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePosthog" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Posthog#create + /sources/{sourceId}#Posthog: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePosthog" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Posthog#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePosthogPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePosthog" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Posthog#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePosthog" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Posthog#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Postmarkapp: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePostmarkappCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePostmarkapp" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postmarkapp#create + /sources/{sourceId}#Postmarkapp: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePostmarkapp" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postmarkapp#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePostmarkappPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePostmarkapp" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postmarkapp#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePostmarkapp" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postmarkapp#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Prestashop: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePrestashopCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePrestashop" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Prestashop#create + /sources/{sourceId}#Prestashop: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePrestashop" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Prestashop#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePrestashopPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePrestashop" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Prestashop#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePrestashop" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Prestashop#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pretix: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePretixCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePretix" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pretix#create + /sources/{sourceId}#Pretix: get: tags: - "Sources" @@ -19643,10 +26953,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOutbrainAmplify" + operationId: "getSourcePretix" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_OutbrainAmplify#read + x-speakeasy-entity-operation: Source_Pretix#read put: tags: - "Sources" @@ -19654,7 +26964,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOutbrainAmplifyPutRequest" + $ref: "#/components/schemas/SourcePretixPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19662,10 +26972,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOutbrainAmplify" + operationId: "putSourcePretix" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_OutbrainAmplify#update + x-speakeasy-entity-operation: Source_Pretix#update delete: tags: - "Sources" @@ -19676,10 +26986,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOutbrainAmplify" + operationId: "deleteSourcePretix" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_OutbrainAmplify#delete + x-speakeasy-entity-operation: Source_Pretix#delete parameters: - name: "sourceId" schema: @@ -19687,13 +26997,13 @@ paths: type: "string" in: "path" required: true - /sources#Outreach: + /sources#Primetric: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceOutreachCreateRequest" + $ref: "#/components/schemas/SourcePrimetricCreateRequest" tags: - "Sources" 
responses: @@ -19707,14 +27017,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceOutreach" + operationId: "createSourcePrimetric" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Outreach#create - /sources/{sourceId}#Outreach: + x-speakeasy-entity-operation: Source_Primetric#create + /sources/{sourceId}#Primetric: get: tags: - "Sources" @@ -19729,10 +27039,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceOutreach" + operationId: "getSourcePrimetric" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Outreach#read + x-speakeasy-entity-operation: Source_Primetric#read put: tags: - "Sources" @@ -19740,7 +27050,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceOutreachPutRequest" + $ref: "#/components/schemas/SourcePrimetricPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19748,10 +27058,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceOutreach" + operationId: "putSourcePrimetric" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Outreach#update + x-speakeasy-entity-operation: Source_Primetric#update delete: tags: - "Sources" @@ -19762,10 +27072,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceOutreach" + operationId: "deleteSourcePrimetric" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Outreach#delete + x-speakeasy-entity-operation: Source_Primetric#delete parameters: - name: "sourceId" schema: @@ -19773,13 +27083,13 @@ paths: type: "string" in: "path" required: true - /sources#Pandadoc: + /sources#Productboard: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePandadocCreateRequest" + $ref: "#/components/schemas/SourceProductboardCreateRequest" tags: - "Sources" responses: @@ -19793,14 +27103,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePandadoc" + operationId: "createSourceProductboard" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pandadoc#create - /sources/{sourceId}#Pandadoc: + x-speakeasy-entity-operation: Source_Productboard#create + /sources/{sourceId}#Productboard: get: tags: - "Sources" @@ -19815,10 +27125,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePandadoc" + operationId: "getSourceProductboard" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pandadoc#read + x-speakeasy-entity-operation: Source_Productboard#read put: tags: - "Sources" @@ -19826,7 +27136,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePandadocPutRequest" + $ref: "#/components/schemas/SourceProductboardPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19834,10 +27144,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePandadoc" + operationId: "putSourceProductboard" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pandadoc#update + x-speakeasy-entity-operation: Source_Productboard#update delete: tags: - "Sources" @@ -19848,10 +27158,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePandadoc" + operationId: "deleteSourceProductboard" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pandadoc#delete + x-speakeasy-entity-operation: Source_Productboard#delete parameters: - name: "sourceId" schema: @@ -19859,13 +27169,13 @@ paths: type: "string" in: "path" required: true - /sources#Pardot: + /sources#Productive: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePardotCreateRequest" + $ref: "#/components/schemas/SourceProductiveCreateRequest" tags: - "Sources" responses: @@ -19879,14 +27189,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePardot" + operationId: "createSourceProductive" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pardot#create - /sources/{sourceId}#Pardot: + x-speakeasy-entity-operation: Source_Productive#create + /sources/{sourceId}#Productive: get: tags: - "Sources" @@ -19901,10 +27211,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePardot" + operationId: "getSourceProductive" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pardot#read + x-speakeasy-entity-operation: Source_Productive#read put: tags: - "Sources" @@ -19912,7 +27222,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePardotPutRequest" + $ref: "#/components/schemas/SourceProductivePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -19920,10 +27230,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePardot" + operationId: "putSourceProductive" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pardot#update + x-speakeasy-entity-operation: Source_Productive#update delete: tags: - "Sources" @@ -19934,10 +27244,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePardot" + operationId: "deleteSourceProductive" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pardot#delete + x-speakeasy-entity-operation: Source_Productive#delete parameters: - name: "sourceId" schema: @@ -19945,13 +27255,13 @@ paths: type: "string" in: "path" required: true - /sources#PaypalTransaction: + /sources#Pypi: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePaypalTransactionCreateRequest" + $ref: "#/components/schemas/SourcePypiCreateRequest" tags: - "Sources" responses: @@ -19965,14 +27275,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePaypalTransaction" + operationId: "createSourcePypi" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PaypalTransaction#create - /sources/{sourceId}#PaypalTransaction: + x-speakeasy-entity-operation: Source_Pypi#create + /sources/{sourceId}#Pypi: get: tags: - "Sources" @@ -19987,10 +27297,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePaypalTransaction" + operationId: "getSourcePypi" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PaypalTransaction#read + x-speakeasy-entity-operation: Source_Pypi#read put: tags: - "Sources" @@ -19998,7 +27308,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePaypalTransactionPutRequest" + $ref: "#/components/schemas/SourcePypiPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20006,10 +27316,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePaypalTransaction" + operationId: "putSourcePypi" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PaypalTransaction#update + x-speakeasy-entity-operation: Source_Pypi#update delete: tags: - "Sources" @@ -20020,10 +27330,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePaypalTransaction" + operationId: "deleteSourcePypi" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PaypalTransaction#delete + x-speakeasy-entity-operation: Source_Pypi#delete parameters: - name: "sourceId" schema: @@ -20031,13 +27341,13 @@ paths: type: "string" in: "path" required: true - /sources#Paystack: + /sources#Qualaroo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePaystackCreateRequest" + $ref: "#/components/schemas/SourceQualarooCreateRequest" tags: - "Sources" responses: @@ -20051,14 +27361,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePaystack" + operationId: "createSourceQualaroo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Paystack#create - /sources/{sourceId}#Paystack: + x-speakeasy-entity-operation: Source_Qualaroo#create + /sources/{sourceId}#Qualaroo: get: tags: - "Sources" @@ -20073,10 +27383,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePaystack" + operationId: "getSourceQualaroo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Paystack#read + x-speakeasy-entity-operation: Source_Qualaroo#read put: tags: - "Sources" @@ -20084,7 +27394,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePaystackPutRequest" + $ref: "#/components/schemas/SourceQualarooPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20092,10 +27402,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePaystack" + operationId: "putSourceQualaroo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Paystack#update + x-speakeasy-entity-operation: Source_Qualaroo#update delete: tags: - "Sources" @@ -20106,10 +27416,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePaystack" + operationId: "deleteSourceQualaroo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Paystack#delete + x-speakeasy-entity-operation: Source_Qualaroo#delete parameters: - name: "sourceId" schema: @@ -20117,13 +27427,13 @@ paths: type: "string" in: "path" required: true - /sources#Pendo: + /sources#Quickbooks: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePendoCreateRequest" + $ref: "#/components/schemas/SourceQuickbooksCreateRequest" tags: - "Sources" responses: @@ -20137,14 +27447,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePendo" + operationId: "createSourceQuickbooks" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pendo#create - /sources/{sourceId}#Pendo: + x-speakeasy-entity-operation: Source_Quickbooks#create + /sources/{sourceId}#Quickbooks: get: tags: - "Sources" @@ -20159,10 +27469,354 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePendo" + operationId: "getSourceQuickbooks" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Quickbooks#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceQuickbooksPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceQuickbooks" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Quickbooks#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceQuickbooks" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Quickbooks#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Railz: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRailzCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRailz" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Railz#create + /sources/{sourceId}#Railz: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRailz" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Railz#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRailzPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRailz" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Railz#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRailz" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Railz#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#RdStationMarketing: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRdStationMarketingCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRdStationMarketing" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_RdStationMarketing#create + /sources/{sourceId}#RdStationMarketing: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRdStationMarketing" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_RdStationMarketing#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRdStationMarketingPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRdStationMarketing" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_RdStationMarketing#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRdStationMarketing" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_RdStationMarketing#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Recharge: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRechargeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRecharge" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recharge#create + /sources/{sourceId}#Recharge: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRecharge" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recharge#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRechargePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRecharge" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recharge#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRecharge" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recharge#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Recreation: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRecreationCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRecreation" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recreation#create + /sources/{sourceId}#Recreation: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRecreation" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pendo#read + x-speakeasy-entity-operation: Source_Recreation#read put: tags: - "Sources" @@ -20170,7 +27824,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePendoPutRequest" + $ref: "#/components/schemas/SourceRecreationPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20178,10 +27832,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePendo" + operationId: "putSourceRecreation" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pendo#update + x-speakeasy-entity-operation: Source_Recreation#update delete: tags: - "Sources" @@ -20192,10 +27846,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePendo" + operationId: "deleteSourceRecreation" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pendo#delete + x-speakeasy-entity-operation: Source_Recreation#delete parameters: - name: "sourceId" schema: @@ -20203,13 +27857,13 @@ paths: type: "string" in: "path" required: true - /sources#Pennylane: + /sources#Recruitee: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePennylaneCreateRequest" + $ref: "#/components/schemas/SourceRecruiteeCreateRequest" tags: - "Sources" responses: @@ -20223,14 +27877,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePennylane" + operationId: "createSourceRecruitee" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pennylane#create - /sources/{sourceId}#Pennylane: + x-speakeasy-entity-operation: Source_Recruitee#create + /sources/{sourceId}#Recruitee: get: tags: - "Sources" @@ -20245,10 +27899,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePennylane" + operationId: "getSourceRecruitee" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pennylane#read + x-speakeasy-entity-operation: Source_Recruitee#read put: tags: - "Sources" @@ -20256,7 +27910,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePennylanePutRequest" + $ref: "#/components/schemas/SourceRecruiteePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20264,10 +27918,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePennylane" + operationId: "putSourceRecruitee" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pennylane#update + x-speakeasy-entity-operation: Source_Recruitee#update delete: tags: - "Sources" @@ -20278,10 +27932,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePennylane" + operationId: "deleteSourceRecruitee" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pennylane#delete + x-speakeasy-entity-operation: Source_Recruitee#delete parameters: - name: "sourceId" schema: @@ -20289,13 +27943,13 @@ paths: type: "string" in: "path" required: true - /sources#Persistiq: + /sources#Recurly: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePersistiqCreateRequest" + $ref: "#/components/schemas/SourceRecurlyCreateRequest" tags: - "Sources" responses: @@ -20309,14 +27963,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePersistiq" + operationId: "createSourceRecurly" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Persistiq#create - /sources/{sourceId}#Persistiq: + x-speakeasy-entity-operation: Source_Recurly#create + /sources/{sourceId}#Recurly: get: tags: - "Sources" @@ -20331,10 +27985,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePersistiq" + operationId: "getSourceRecurly" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Persistiq#read + x-speakeasy-entity-operation: Source_Recurly#read put: tags: - "Sources" @@ -20342,7 +27996,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePersistiqPutRequest" + $ref: "#/components/schemas/SourceRecurlyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20350,10 +28004,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePersistiq" + operationId: "putSourceRecurly" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Persistiq#update + x-speakeasy-entity-operation: Source_Recurly#update delete: tags: - "Sources" @@ -20364,10 +28018,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePersistiq" + operationId: "deleteSourceRecurly" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Persistiq#delete + x-speakeasy-entity-operation: Source_Recurly#delete parameters: - name: "sourceId" schema: @@ -20375,13 +28029,13 @@ paths: type: "string" in: "path" required: true - /sources#Persona: + /sources#Reddit: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePersonaCreateRequest" + $ref: "#/components/schemas/SourceRedditCreateRequest" tags: - "Sources" responses: @@ -20395,14 +28049,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePersona" + operationId: "createSourceReddit" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Persona#create - /sources/{sourceId}#Persona: + x-speakeasy-entity-operation: Source_Reddit#create + /sources/{sourceId}#Reddit: get: tags: - "Sources" @@ -20417,10 +28071,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePersona" + operationId: "getSourceReddit" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Persona#read + x-speakeasy-entity-operation: Source_Reddit#read put: tags: - "Sources" @@ -20428,7 +28082,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePersonaPutRequest" + $ref: "#/components/schemas/SourceRedditPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20436,10 +28090,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePersona" + operationId: "putSourceReddit" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Persona#update + x-speakeasy-entity-operation: Source_Reddit#update delete: tags: - "Sources" @@ -20450,10 +28104,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePersona" + operationId: "deleteSourceReddit" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Persona#delete + x-speakeasy-entity-operation: Source_Reddit#delete parameters: - name: "sourceId" schema: @@ -20461,13 +28115,13 @@ paths: type: "string" in: "path" required: true - /sources#PexelsApi: + /sources#Redshift: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePexelsApiCreateRequest" + $ref: "#/components/schemas/SourceRedshiftCreateRequest" tags: - "Sources" responses: @@ -20481,14 +28135,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePexelsApi" + operationId: "createSourceRedshift" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PexelsApi#create - /sources/{sourceId}#PexelsApi: + x-speakeasy-entity-operation: Source_Redshift#create + /sources/{sourceId}#Redshift: get: tags: - "Sources" @@ -20503,10 +28157,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePexelsApi" + operationId: "getSourceRedshift" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PexelsApi#read + x-speakeasy-entity-operation: Source_Redshift#read put: tags: - "Sources" @@ -20514,7 +28168,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePexelsApiPutRequest" + $ref: "#/components/schemas/SourceRedshiftPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20522,10 +28176,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePexelsApi" + operationId: "putSourceRedshift" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PexelsApi#update + x-speakeasy-entity-operation: Source_Redshift#update delete: tags: - "Sources" @@ -20536,10 +28190,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePexelsApi" + operationId: "deleteSourceRedshift" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PexelsApi#delete + x-speakeasy-entity-operation: Source_Redshift#delete parameters: - name: "sourceId" schema: @@ -20547,13 +28201,13 @@ paths: type: "string" in: "path" required: true - /sources#Picqer: + /sources#Referralhero: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePicqerCreateRequest" + $ref: "#/components/schemas/SourceReferralheroCreateRequest" tags: - "Sources" responses: @@ -20567,14 +28221,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePicqer" + operationId: "createSourceReferralhero" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Picqer#create - /sources/{sourceId}#Picqer: + x-speakeasy-entity-operation: Source_Referralhero#create + /sources/{sourceId}#Referralhero: get: tags: - "Sources" @@ -20589,10 +28243,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePicqer" + operationId: "getSourceReferralhero" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Picqer#read + x-speakeasy-entity-operation: Source_Referralhero#read put: tags: - "Sources" @@ -20600,7 +28254,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePicqerPutRequest" + $ref: "#/components/schemas/SourceReferralheroPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20608,10 +28262,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePicqer" + operationId: "putSourceReferralhero" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Picqer#update + x-speakeasy-entity-operation: Source_Referralhero#update delete: tags: - "Sources" @@ -20622,10 +28276,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePicqer" + operationId: "deleteSourceReferralhero" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Picqer#delete + x-speakeasy-entity-operation: Source_Referralhero#delete parameters: - name: "sourceId" schema: @@ -20633,13 +28287,13 @@ paths: type: "string" in: "path" required: true - /sources#Pinterest: + /sources#Rentcast: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePinterestCreateRequest" + $ref: "#/components/schemas/SourceRentcastCreateRequest" tags: - "Sources" responses: @@ -20653,14 +28307,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePinterest" + operationId: "createSourceRentcast" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pinterest#create - /sources/{sourceId}#Pinterest: + x-speakeasy-entity-operation: Source_Rentcast#create + /sources/{sourceId}#Rentcast: get: tags: - "Sources" @@ -20675,10 +28329,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePinterest" + operationId: "getSourceRentcast" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pinterest#read + x-speakeasy-entity-operation: Source_Rentcast#read put: tags: - "Sources" @@ -20686,7 +28340,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePinterestPutRequest" + $ref: "#/components/schemas/SourceRentcastPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20694,10 +28348,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePinterest" + operationId: "putSourceRentcast" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pinterest#update + x-speakeasy-entity-operation: Source_Rentcast#update delete: tags: - "Sources" @@ -20708,10 +28362,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePinterest" + operationId: "deleteSourceRentcast" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pinterest#delete + x-speakeasy-entity-operation: Source_Rentcast#delete parameters: - name: "sourceId" schema: @@ -20719,13 +28373,13 @@ paths: type: "string" in: "path" required: true - /sources#Pipedrive: + /sources#Repairshopr: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePipedriveCreateRequest" + $ref: "#/components/schemas/SourceRepairshoprCreateRequest" tags: - "Sources" responses: @@ -20739,14 +28393,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePipedrive" + operationId: "createSourceRepairshopr" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pipedrive#create - /sources/{sourceId}#Pipedrive: + x-speakeasy-entity-operation: Source_Repairshopr#create + /sources/{sourceId}#Repairshopr: get: tags: - "Sources" @@ -20761,10 +28415,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePipedrive" + operationId: "getSourceRepairshopr" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pipedrive#read + x-speakeasy-entity-operation: Source_Repairshopr#read put: tags: - "Sources" @@ -20772,7 +28426,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePipedrivePutRequest" + $ref: "#/components/schemas/SourceRepairshoprPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20780,10 +28434,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePipedrive" + operationId: "putSourceRepairshopr" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pipedrive#update + x-speakeasy-entity-operation: Source_Repairshopr#update delete: tags: - "Sources" @@ -20794,10 +28448,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePipedrive" + operationId: "deleteSourceRepairshopr" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pipedrive#delete + x-speakeasy-entity-operation: Source_Repairshopr#delete parameters: - name: "sourceId" schema: @@ -20805,13 +28459,13 @@ paths: type: "string" in: "path" required: true - /sources#PivotalTracker: + /sources#ReplyIo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePivotalTrackerCreateRequest" + $ref: "#/components/schemas/SourceReplyIoCreateRequest" tags: - "Sources" responses: @@ -20825,14 +28479,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePivotalTracker" + operationId: "createSourceReplyIo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PivotalTracker#create - /sources/{sourceId}#PivotalTracker: + x-speakeasy-entity-operation: Source_ReplyIo#create + /sources/{sourceId}#ReplyIo: get: tags: - "Sources" @@ -20847,10 +28501,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePivotalTracker" + operationId: "getSourceReplyIo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PivotalTracker#read + x-speakeasy-entity-operation: Source_ReplyIo#read put: tags: - "Sources" @@ -20858,7 +28512,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePivotalTrackerPutRequest" + $ref: "#/components/schemas/SourceReplyIoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20866,10 +28520,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePivotalTracker" + operationId: "putSourceReplyIo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PivotalTracker#update + x-speakeasy-entity-operation: Source_ReplyIo#update delete: tags: - "Sources" @@ -20880,10 +28534,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePivotalTracker" + operationId: "deleteSourceReplyIo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PivotalTracker#delete + x-speakeasy-entity-operation: Source_ReplyIo#delete parameters: - name: "sourceId" schema: @@ -20891,13 +28545,13 @@ paths: type: "string" in: "path" required: true - /sources#Piwik: + /sources#Retently: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePiwikCreateRequest" + $ref: "#/components/schemas/SourceRetentlyCreateRequest" tags: - "Sources" responses: @@ -20911,14 +28565,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePiwik" + operationId: "createSourceRetently" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Piwik#create - /sources/{sourceId}#Piwik: + x-speakeasy-entity-operation: Source_Retently#create + /sources/{sourceId}#Retently: get: tags: - "Sources" @@ -20933,10 +28587,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePiwik" + operationId: "getSourceRetently" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Piwik#read + x-speakeasy-entity-operation: Source_Retently#read put: tags: - "Sources" @@ -20944,7 +28598,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePiwikPutRequest" + $ref: "#/components/schemas/SourceRetentlyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -20952,10 +28606,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePiwik" + operationId: "putSourceRetently" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Piwik#update + x-speakeasy-entity-operation: Source_Retently#update delete: tags: - "Sources" @@ -20966,10 +28620,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePiwik" + operationId: "deleteSourceRetently" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Piwik#delete + x-speakeasy-entity-operation: Source_Retently#delete parameters: - name: "sourceId" schema: @@ -20977,13 +28631,13 @@ paths: type: "string" in: "path" required: true - /sources#Plaid: + /sources#Revenuecat: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePlaidCreateRequest" + $ref: "#/components/schemas/SourceRevenuecatCreateRequest" tags: - "Sources" responses: @@ -20997,14 +28651,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePlaid" + operationId: "createSourceRevenuecat" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Plaid#create - /sources/{sourceId}#Plaid: + x-speakeasy-entity-operation: Source_Revenuecat#create + /sources/{sourceId}#Revenuecat: get: tags: - "Sources" @@ -21019,10 +28673,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePlaid" + operationId: "getSourceRevenuecat" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Plaid#read + x-speakeasy-entity-operation: Source_Revenuecat#read put: tags: - "Sources" @@ -21030,7 +28684,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePlaidPutRequest" + $ref: "#/components/schemas/SourceRevenuecatPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21038,10 +28692,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePlaid" + operationId: "putSourceRevenuecat" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Plaid#update + x-speakeasy-entity-operation: Source_Revenuecat#update delete: tags: - "Sources" @@ -21052,10 +28706,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePlaid" + operationId: "deleteSourceRevenuecat" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Plaid#delete + x-speakeasy-entity-operation: Source_Revenuecat#delete parameters: - name: "sourceId" schema: @@ -21063,13 +28717,13 @@ paths: type: "string" in: "path" required: true - /sources#Planhat: + /sources#RevolutMerchant: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePlanhatCreateRequest" + $ref: "#/components/schemas/SourceRevolutMerchantCreateRequest" tags: - "Sources" responses: @@ -21083,14 +28737,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePlanhat" + operationId: "createSourceRevolutMerchant" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Planhat#create - /sources/{sourceId}#Planhat: + x-speakeasy-entity-operation: Source_RevolutMerchant#create + /sources/{sourceId}#RevolutMerchant: get: tags: - "Sources" @@ -21105,10 +28759,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePlanhat" + operationId: "getSourceRevolutMerchant" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Planhat#read + x-speakeasy-entity-operation: Source_RevolutMerchant#read put: tags: - "Sources" @@ -21116,7 +28770,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePlanhatPutRequest" + $ref: "#/components/schemas/SourceRevolutMerchantPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21124,10 +28778,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePlanhat" + operationId: "putSourceRevolutMerchant" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Planhat#update + x-speakeasy-entity-operation: Source_RevolutMerchant#update delete: tags: - "Sources" @@ -21138,10 +28792,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePlanhat" + operationId: "deleteSourceRevolutMerchant" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Planhat#delete + x-speakeasy-entity-operation: Source_RevolutMerchant#delete parameters: - name: "sourceId" schema: @@ -21149,13 +28803,13 @@ paths: type: "string" in: "path" required: true - /sources#Plausible: + /sources#RkiCovid: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePlausibleCreateRequest" + $ref: "#/components/schemas/SourceRkiCovidCreateRequest" tags: - "Sources" responses: @@ -21169,14 +28823,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePlausible" + operationId: "createSourceRkiCovid" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Plausible#create - /sources/{sourceId}#Plausible: + x-speakeasy-entity-operation: Source_RkiCovid#create + /sources/{sourceId}#RkiCovid: get: tags: - "Sources" @@ -21191,10 +28845,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePlausible" + operationId: "getSourceRkiCovid" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Plausible#read + x-speakeasy-entity-operation: Source_RkiCovid#read put: tags: - "Sources" @@ -21202,7 +28856,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePlausiblePutRequest" + $ref: "#/components/schemas/SourceRkiCovidPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21210,10 +28864,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePlausible" + operationId: "putSourceRkiCovid" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Plausible#update + x-speakeasy-entity-operation: Source_RkiCovid#update delete: tags: - "Sources" @@ -21224,10 +28878,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePlausible" + operationId: "deleteSourceRkiCovid" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Plausible#delete + x-speakeasy-entity-operation: Source_RkiCovid#delete parameters: - name: "sourceId" schema: @@ -21235,13 +28889,13 @@ paths: type: "string" in: "path" required: true - /sources#Pocket: + /sources#Rocketlane: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePocketCreateRequest" + $ref: "#/components/schemas/SourceRocketlaneCreateRequest" tags: - "Sources" responses: @@ -21255,14 +28909,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePocket" + operationId: "createSourceRocketlane" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pocket#create - /sources/{sourceId}#Pocket: + x-speakeasy-entity-operation: Source_Rocketlane#create + /sources/{sourceId}#Rocketlane: get: tags: - "Sources" @@ -21277,10 +28931,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePocket" + operationId: "getSourceRocketlane" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pocket#read + x-speakeasy-entity-operation: Source_Rocketlane#read put: tags: - "Sources" @@ -21288,7 +28942,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePocketPutRequest" + $ref: "#/components/schemas/SourceRocketlanePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21296,10 +28950,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePocket" + operationId: "putSourceRocketlane" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pocket#update + x-speakeasy-entity-operation: Source_Rocketlane#update delete: tags: - "Sources" @@ -21310,10 +28964,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePocket" + operationId: "deleteSourceRocketlane" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pocket#delete + x-speakeasy-entity-operation: Source_Rocketlane#delete parameters: - name: "sourceId" schema: @@ -21321,13 +28975,13 @@ paths: type: "string" in: "path" required: true - /sources#Pokeapi: + /sources#Rollbar: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePokeapiCreateRequest" + $ref: "#/components/schemas/SourceRollbarCreateRequest" tags: - "Sources" responses: @@ -21341,14 +28995,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePokeapi" + operationId: "createSourceRollbar" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pokeapi#create - /sources/{sourceId}#Pokeapi: + x-speakeasy-entity-operation: Source_Rollbar#create + /sources/{sourceId}#Rollbar: get: tags: - "Sources" @@ -21363,10 +29017,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePokeapi" + operationId: "getSourceRollbar" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pokeapi#read + x-speakeasy-entity-operation: Source_Rollbar#read put: tags: - "Sources" @@ -21374,7 +29028,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePokeapiPutRequest" + $ref: "#/components/schemas/SourceRollbarPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21382,10 +29036,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePokeapi" + operationId: "putSourceRollbar" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pokeapi#update + x-speakeasy-entity-operation: Source_Rollbar#update delete: tags: - "Sources" @@ -21396,10 +29050,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePokeapi" + operationId: "deleteSourceRollbar" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pokeapi#delete + x-speakeasy-entity-operation: Source_Rollbar#delete parameters: - name: "sourceId" schema: @@ -21407,13 +29061,13 @@ paths: type: "string" in: "path" required: true - /sources#PolygonStockApi: + /sources#Rootly: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePolygonStockApiCreateRequest" + $ref: "#/components/schemas/SourceRootlyCreateRequest" tags: - "Sources" responses: @@ -21427,14 +29081,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePolygonStockApi" + operationId: "createSourceRootly" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PolygonStockApi#create - /sources/{sourceId}#PolygonStockApi: + x-speakeasy-entity-operation: Source_Rootly#create + /sources/{sourceId}#Rootly: get: tags: - "Sources" @@ -21449,10 +29103,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePolygonStockApi" + operationId: "getSourceRootly" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PolygonStockApi#read + x-speakeasy-entity-operation: Source_Rootly#read put: tags: - "Sources" @@ -21460,7 +29114,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePolygonStockApiPutRequest" + $ref: "#/components/schemas/SourceRootlyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21468,10 +29122,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePolygonStockApi" + operationId: "putSourceRootly" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PolygonStockApi#update + x-speakeasy-entity-operation: Source_Rootly#update delete: tags: - "Sources" @@ -21482,10 +29136,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePolygonStockApi" + operationId: "deleteSourceRootly" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PolygonStockApi#delete + x-speakeasy-entity-operation: Source_Rootly#delete parameters: - name: "sourceId" schema: @@ -21493,13 +29147,13 @@ paths: type: "string" in: "path" required: true - /sources#Postgres: + /sources#Rss: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePostgresCreateRequest" + $ref: "#/components/schemas/SourceRssCreateRequest" tags: - "Sources" responses: @@ -21513,14 +29167,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePostgres" + operationId: "createSourceRss" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Postgres#create - /sources/{sourceId}#Postgres: + x-speakeasy-entity-operation: Source_Rss#create + /sources/{sourceId}#Rss: get: tags: - "Sources" @@ -21535,10 +29189,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePostgres" + operationId: "getSourceRss" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Postgres#read + x-speakeasy-entity-operation: Source_Rss#read put: tags: - "Sources" @@ -21546,7 +29200,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePostgresPutRequest" + $ref: "#/components/schemas/SourceRssPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21554,10 +29208,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePostgres" + operationId: "putSourceRss" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Postgres#update + x-speakeasy-entity-operation: Source_Rss#update delete: tags: - "Sources" @@ -21568,10 +29222,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePostgres" + operationId: "deleteSourceRss" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Postgres#delete + x-speakeasy-entity-operation: Source_Rss#delete parameters: - name: "sourceId" schema: @@ -21579,13 +29233,13 @@ paths: type: "string" in: "path" required: true - /sources#Posthog: + /sources#Ruddr: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePosthogCreateRequest" + $ref: "#/components/schemas/SourceRuddrCreateRequest" tags: - "Sources" responses: @@ -21599,14 +29253,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePosthog" + operationId: "createSourceRuddr" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Posthog#create - /sources/{sourceId}#Posthog: + x-speakeasy-entity-operation: Source_Ruddr#create + /sources/{sourceId}#Ruddr: get: tags: - "Sources" @@ -21621,10 +29275,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePosthog" + operationId: "getSourceRuddr" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Posthog#read + x-speakeasy-entity-operation: Source_Ruddr#read put: tags: - "Sources" @@ -21632,7 +29286,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePosthogPutRequest" + $ref: "#/components/schemas/SourceRuddrPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21640,10 +29294,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePosthog" + operationId: "putSourceRuddr" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Posthog#update + x-speakeasy-entity-operation: Source_Ruddr#update delete: tags: - "Sources" @@ -21654,10 +29308,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePosthog" + operationId: "deleteSourceRuddr" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Posthog#delete + x-speakeasy-entity-operation: Source_Ruddr#delete parameters: - name: "sourceId" schema: @@ -21665,13 +29319,13 @@ paths: type: "string" in: "path" required: true - /sources#Postmarkapp: + /sources#S3: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePostmarkappCreateRequest" + $ref: "#/components/schemas/SourceS3CreateRequest" tags: - "Sources" responses: @@ -21685,14 +29339,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePostmarkapp" + operationId: "createSourceS3" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Postmarkapp#create - /sources/{sourceId}#Postmarkapp: + x-speakeasy-entity-operation: Source_S3#create + /sources/{sourceId}#S3: get: tags: - "Sources" @@ -21707,10 +29361,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePostmarkapp" + operationId: "getSourceS3" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Postmarkapp#read + x-speakeasy-entity-operation: Source_S3#read put: tags: - "Sources" @@ -21718,7 +29372,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePostmarkappPutRequest" + $ref: "#/components/schemas/SourceS3PutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21726,10 +29380,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePostmarkapp" + operationId: "putSourceS3" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Postmarkapp#update + x-speakeasy-entity-operation: Source_S3#update delete: tags: - "Sources" @@ -21740,10 +29394,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePostmarkapp" + operationId: "deleteSourceS3" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Postmarkapp#delete + x-speakeasy-entity-operation: Source_S3#delete parameters: - name: "sourceId" schema: @@ -21751,13 +29405,13 @@ paths: type: "string" in: "path" required: true - /sources#Prestashop: + /sources#Safetyculture: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePrestashopCreateRequest" + $ref: "#/components/schemas/SourceSafetycultureCreateRequest" tags: - "Sources" responses: @@ -21771,14 +29425,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePrestashop" + operationId: "createSourceSafetyculture" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Prestashop#create - /sources/{sourceId}#Prestashop: + x-speakeasy-entity-operation: Source_Safetyculture#create + /sources/{sourceId}#Safetyculture: get: tags: - "Sources" @@ -21793,10 +29447,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePrestashop" + operationId: "getSourceSafetyculture" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Prestashop#read + x-speakeasy-entity-operation: Source_Safetyculture#read put: tags: - "Sources" @@ -21804,7 +29458,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePrestashopPutRequest" + $ref: "#/components/schemas/SourceSafetyculturePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21812,10 +29466,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePrestashop" + operationId: "putSourceSafetyculture" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Prestashop#update + x-speakeasy-entity-operation: Source_Safetyculture#update delete: tags: - "Sources" @@ -21826,10 +29480,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePrestashop" + operationId: "deleteSourceSafetyculture" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Prestashop#delete + x-speakeasy-entity-operation: Source_Safetyculture#delete parameters: - name: "sourceId" schema: @@ -21837,13 +29491,13 @@ paths: type: "string" in: "path" required: true - /sources#Primetric: + /sources#SageHr: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePrimetricCreateRequest" + $ref: "#/components/schemas/SourceSageHrCreateRequest" tags: - "Sources" responses: @@ -21857,14 +29511,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePrimetric" + operationId: "createSourceSageHr" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Primetric#create - /sources/{sourceId}#Primetric: + x-speakeasy-entity-operation: Source_SageHr#create + /sources/{sourceId}#SageHr: get: tags: - "Sources" @@ -21879,10 +29533,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePrimetric" + operationId: "getSourceSageHr" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Primetric#read + x-speakeasy-entity-operation: Source_SageHr#read put: tags: - "Sources" @@ -21890,7 +29544,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePrimetricPutRequest" + $ref: "#/components/schemas/SourceSageHrPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21898,10 +29552,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePrimetric" + operationId: "putSourceSageHr" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Primetric#update + x-speakeasy-entity-operation: Source_SageHr#update delete: tags: - "Sources" @@ -21912,10 +29566,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePrimetric" + operationId: "deleteSourceSageHr" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Primetric#delete + x-speakeasy-entity-operation: Source_SageHr#delete parameters: - name: "sourceId" schema: @@ -21923,13 +29577,13 @@ paths: type: "string" in: "path" required: true - /sources#Productboard: + /sources#Salesflare: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceProductboardCreateRequest" + $ref: "#/components/schemas/SourceSalesflareCreateRequest" tags: - "Sources" responses: @@ -21943,14 +29597,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceProductboard" + operationId: "createSourceSalesflare" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Productboard#create - /sources/{sourceId}#Productboard: + x-speakeasy-entity-operation: Source_Salesflare#create + /sources/{sourceId}#Salesflare: get: tags: - "Sources" @@ -21965,10 +29619,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceProductboard" + operationId: "getSourceSalesflare" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Productboard#read + x-speakeasy-entity-operation: Source_Salesflare#read put: tags: - "Sources" @@ -21976,7 +29630,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceProductboardPutRequest" + $ref: "#/components/schemas/SourceSalesflarePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -21984,10 +29638,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceProductboard" + operationId: "putSourceSalesflare" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Productboard#update + x-speakeasy-entity-operation: Source_Salesflare#update delete: tags: - "Sources" @@ -21998,10 +29652,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceProductboard" + operationId: "deleteSourceSalesflare" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Productboard#delete + x-speakeasy-entity-operation: Source_Salesflare#delete parameters: - name: "sourceId" schema: @@ -22009,13 +29663,13 @@ paths: type: "string" in: "path" required: true - /sources#Productive: + /sources#Salesforce: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceProductiveCreateRequest" + $ref: "#/components/schemas/SourceSalesforceCreateRequest" tags: - "Sources" responses: @@ -22029,14 +29683,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceProductive" + operationId: "createSourceSalesforce" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Productive#create - /sources/{sourceId}#Productive: + x-speakeasy-entity-operation: Source_Salesforce#create + /sources/{sourceId}#Salesforce: get: tags: - "Sources" @@ -22051,10 +29705,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceProductive" + operationId: "getSourceSalesforce" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Productive#read + x-speakeasy-entity-operation: Source_Salesforce#read put: tags: - "Sources" @@ -22062,7 +29716,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceProductivePutRequest" + $ref: "#/components/schemas/SourceSalesforcePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22070,10 +29724,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceProductive" + operationId: "putSourceSalesforce" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Productive#update + x-speakeasy-entity-operation: Source_Salesforce#update delete: tags: - "Sources" @@ -22084,10 +29738,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceProductive" + operationId: "deleteSourceSalesforce" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Productive#delete + x-speakeasy-entity-operation: Source_Salesforce#delete parameters: - name: "sourceId" schema: @@ -22095,13 +29749,13 @@ paths: type: "string" in: "path" required: true - /sources#Pypi: + /sources#Salesloft: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourcePypiCreateRequest" + $ref: "#/components/schemas/SourceSalesloftCreateRequest" tags: - "Sources" responses: @@ -22115,14 +29769,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourcePypi" + operationId: "createSourceSalesloft" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pypi#create - /sources/{sourceId}#Pypi: + x-speakeasy-entity-operation: Source_Salesloft#create + /sources/{sourceId}#Salesloft: get: tags: - "Sources" @@ -22137,10 +29791,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourcePypi" + operationId: "getSourceSalesloft" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pypi#read + x-speakeasy-entity-operation: Source_Salesloft#read put: tags: - "Sources" @@ -22148,7 +29802,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourcePypiPutRequest" + $ref: "#/components/schemas/SourceSalesloftPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22156,10 +29810,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourcePypi" + operationId: "putSourceSalesloft" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pypi#update + x-speakeasy-entity-operation: Source_Salesloft#update delete: tags: - "Sources" @@ -22170,10 +29824,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourcePypi" + operationId: "deleteSourceSalesloft" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Pypi#delete + x-speakeasy-entity-operation: Source_Salesloft#delete parameters: - name: "sourceId" schema: @@ -22181,13 +29835,13 @@ paths: type: "string" in: "path" required: true - /sources#Qualaroo: + /sources#SapFieldglass: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceQualarooCreateRequest" + $ref: "#/components/schemas/SourceSapFieldglassCreateRequest" tags: - "Sources" responses: @@ -22201,14 +29855,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceQualaroo" + operationId: "createSourceSapFieldglass" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Qualaroo#create - /sources/{sourceId}#Qualaroo: + x-speakeasy-entity-operation: Source_SapFieldglass#create + /sources/{sourceId}#SapFieldglass: get: tags: - "Sources" @@ -22223,10 +29877,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceQualaroo" + operationId: "getSourceSapFieldglass" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Qualaroo#read + x-speakeasy-entity-operation: Source_SapFieldglass#read put: tags: - "Sources" @@ -22234,7 +29888,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceQualarooPutRequest" + $ref: "#/components/schemas/SourceSapFieldglassPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22242,10 +29896,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceQualaroo" + operationId: "putSourceSapFieldglass" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Qualaroo#update + x-speakeasy-entity-operation: Source_SapFieldglass#update delete: tags: - "Sources" @@ -22256,10 +29910,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceQualaroo" + operationId: "deleteSourceSapFieldglass" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Qualaroo#delete + x-speakeasy-entity-operation: Source_SapFieldglass#delete parameters: - name: "sourceId" schema: @@ -22267,13 +29921,13 @@ paths: type: "string" in: "path" required: true - /sources#Quickbooks: + /sources#Savvycal: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceQuickbooksCreateRequest" + $ref: "#/components/schemas/SourceSavvycalCreateRequest" tags: - "Sources" responses: @@ -22287,14 +29941,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceQuickbooks" + operationId: "createSourceSavvycal" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Quickbooks#create - /sources/{sourceId}#Quickbooks: + x-speakeasy-entity-operation: Source_Savvycal#create + /sources/{sourceId}#Savvycal: get: tags: - "Sources" @@ -22309,10 +29963,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceQuickbooks" + operationId: "getSourceSavvycal" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Quickbooks#read + x-speakeasy-entity-operation: Source_Savvycal#read put: tags: - "Sources" @@ -22320,7 +29974,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceQuickbooksPutRequest" + $ref: "#/components/schemas/SourceSavvycalPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22328,10 +29982,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceQuickbooks" + operationId: "putSourceSavvycal" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Quickbooks#update + x-speakeasy-entity-operation: Source_Savvycal#update delete: tags: - "Sources" @@ -22342,10 +29996,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceQuickbooks" + operationId: "deleteSourceSavvycal" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Quickbooks#delete + x-speakeasy-entity-operation: Source_Savvycal#delete parameters: - name: "sourceId" schema: @@ -22353,13 +30007,13 @@ paths: type: "string" in: "path" required: true - /sources#Railz: + /sources#Scryfall: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRailzCreateRequest" + $ref: "#/components/schemas/SourceScryfallCreateRequest" tags: - "Sources" responses: @@ -22373,14 +30027,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRailz" + operationId: "createSourceScryfall" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Railz#create - /sources/{sourceId}#Railz: + x-speakeasy-entity-operation: Source_Scryfall#create + /sources/{sourceId}#Scryfall: get: tags: - "Sources" @@ -22395,10 +30049,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRailz" + operationId: "getSourceScryfall" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Railz#read + x-speakeasy-entity-operation: Source_Scryfall#read put: tags: - "Sources" @@ -22406,7 +30060,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRailzPutRequest" + $ref: "#/components/schemas/SourceScryfallPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22414,10 +30068,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRailz" + operationId: "putSourceScryfall" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Railz#update + x-speakeasy-entity-operation: Source_Scryfall#update delete: tags: - "Sources" @@ -22428,10 +30082,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRailz" + operationId: "deleteSourceScryfall" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Railz#delete + x-speakeasy-entity-operation: Source_Scryfall#delete parameters: - name: "sourceId" schema: @@ -22439,13 +30093,13 @@ paths: type: "string" in: "path" required: true - /sources#RdStationMarketing: + /sources#Secoda: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRdStationMarketingCreateRequest" + $ref: "#/components/schemas/SourceSecodaCreateRequest" tags: - "Sources" responses: @@ -22459,14 +30113,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRdStationMarketing" + operationId: "createSourceSecoda" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_RdStationMarketing#create - /sources/{sourceId}#RdStationMarketing: + x-speakeasy-entity-operation: Source_Secoda#create + /sources/{sourceId}#Secoda: get: tags: - "Sources" @@ -22481,10 +30135,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRdStationMarketing" + operationId: "getSourceSecoda" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_RdStationMarketing#read + x-speakeasy-entity-operation: Source_Secoda#read put: tags: - "Sources" @@ -22492,7 +30146,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRdStationMarketingPutRequest" + $ref: "#/components/schemas/SourceSecodaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22500,10 +30154,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRdStationMarketing" + operationId: "putSourceSecoda" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_RdStationMarketing#update + x-speakeasy-entity-operation: Source_Secoda#update delete: tags: - "Sources" @@ -22514,10 +30168,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRdStationMarketing" + operationId: "deleteSourceSecoda" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_RdStationMarketing#delete + x-speakeasy-entity-operation: Source_Secoda#delete parameters: - name: "sourceId" schema: @@ -22525,13 +30179,13 @@ paths: type: "string" in: "path" required: true - /sources#Recharge: + /sources#Segment: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRechargeCreateRequest" + $ref: "#/components/schemas/SourceSegmentCreateRequest" tags: - "Sources" responses: @@ -22545,14 +30199,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRecharge" + operationId: "createSourceSegment" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recharge#create - /sources/{sourceId}#Recharge: + x-speakeasy-entity-operation: Source_Segment#create + /sources/{sourceId}#Segment: get: tags: - "Sources" @@ -22567,10 +30221,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRecharge" + operationId: "getSourceSegment" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recharge#read + x-speakeasy-entity-operation: Source_Segment#read put: tags: - "Sources" @@ -22578,7 +30232,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRechargePutRequest" + $ref: "#/components/schemas/SourceSegmentPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22586,10 +30240,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRecharge" + operationId: "putSourceSegment" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recharge#update + x-speakeasy-entity-operation: Source_Segment#update delete: tags: - "Sources" @@ -22600,10 +30254,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRecharge" + operationId: "deleteSourceSegment" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recharge#delete + x-speakeasy-entity-operation: Source_Segment#delete parameters: - name: "sourceId" schema: @@ -22611,13 +30265,13 @@ paths: type: "string" in: "path" required: true - /sources#Recreation: + /sources#Sendgrid: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRecreationCreateRequest" + $ref: "#/components/schemas/SourceSendgridCreateRequest" tags: - "Sources" responses: @@ -22631,14 +30285,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRecreation" + operationId: "createSourceSendgrid" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recreation#create - /sources/{sourceId}#Recreation: + x-speakeasy-entity-operation: Source_Sendgrid#create + /sources/{sourceId}#Sendgrid: get: tags: - "Sources" @@ -22653,10 +30307,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRecreation" + operationId: "getSourceSendgrid" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recreation#read + x-speakeasy-entity-operation: Source_Sendgrid#read put: tags: - "Sources" @@ -22664,7 +30318,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRecreationPutRequest" + $ref: "#/components/schemas/SourceSendgridPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22672,10 +30326,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRecreation" + operationId: "putSourceSendgrid" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recreation#update + x-speakeasy-entity-operation: Source_Sendgrid#update delete: tags: - "Sources" @@ -22686,10 +30340,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRecreation" + operationId: "deleteSourceSendgrid" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recreation#delete + x-speakeasy-entity-operation: Source_Sendgrid#delete parameters: - name: "sourceId" schema: @@ -22697,13 +30351,13 @@ paths: type: "string" in: "path" required: true - /sources#Recruitee: + /sources#Sendinblue: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRecruiteeCreateRequest" + $ref: "#/components/schemas/SourceSendinblueCreateRequest" tags: - "Sources" responses: @@ -22717,14 +30371,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRecruitee" + operationId: "createSourceSendinblue" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recruitee#create - /sources/{sourceId}#Recruitee: + x-speakeasy-entity-operation: Source_Sendinblue#create + /sources/{sourceId}#Sendinblue: get: tags: - "Sources" @@ -22739,10 +30393,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRecruitee" + operationId: "getSourceSendinblue" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recruitee#read + x-speakeasy-entity-operation: Source_Sendinblue#read put: tags: - "Sources" @@ -22750,7 +30404,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRecruiteePutRequest" + $ref: "#/components/schemas/SourceSendinbluePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22758,10 +30412,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRecruitee" + operationId: "putSourceSendinblue" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recruitee#update + x-speakeasy-entity-operation: Source_Sendinblue#update delete: tags: - "Sources" @@ -22772,10 +30426,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRecruitee" + operationId: "deleteSourceSendinblue" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recruitee#delete + x-speakeasy-entity-operation: Source_Sendinblue#delete parameters: - name: "sourceId" schema: @@ -22783,13 +30437,13 @@ paths: type: "string" in: "path" required: true - /sources#Recurly: + /sources#Sendowl: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRecurlyCreateRequest" + $ref: "#/components/schemas/SourceSendowlCreateRequest" tags: - "Sources" responses: @@ -22803,14 +30457,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRecurly" + operationId: "createSourceSendowl" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recurly#create - /sources/{sourceId}#Recurly: + x-speakeasy-entity-operation: Source_Sendowl#create + /sources/{sourceId}#Sendowl: get: tags: - "Sources" @@ -22825,10 +30479,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRecurly" + operationId: "getSourceSendowl" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recurly#read + x-speakeasy-entity-operation: Source_Sendowl#read put: tags: - "Sources" @@ -22836,7 +30490,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRecurlyPutRequest" + $ref: "#/components/schemas/SourceSendowlPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22844,10 +30498,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRecurly" + operationId: "putSourceSendowl" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recurly#update + x-speakeasy-entity-operation: Source_Sendowl#update delete: tags: - "Sources" @@ -22858,10 +30512,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRecurly" + operationId: "deleteSourceSendowl" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Recurly#delete + x-speakeasy-entity-operation: Source_Sendowl#delete parameters: - name: "sourceId" schema: @@ -22869,13 +30523,13 @@ paths: type: "string" in: "path" required: true - /sources#Reddit: + /sources#Sendpulse: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRedditCreateRequest" + $ref: "#/components/schemas/SourceSendpulseCreateRequest" tags: - "Sources" responses: @@ -22889,14 +30543,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceReddit" + operationId: "createSourceSendpulse" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Reddit#create - /sources/{sourceId}#Reddit: + x-speakeasy-entity-operation: Source_Sendpulse#create + /sources/{sourceId}#Sendpulse: get: tags: - "Sources" @@ -22911,10 +30565,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceReddit" + operationId: "getSourceSendpulse" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Reddit#read + x-speakeasy-entity-operation: Source_Sendpulse#read put: tags: - "Sources" @@ -22922,7 +30576,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRedditPutRequest" + $ref: "#/components/schemas/SourceSendpulsePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -22930,10 +30584,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceReddit" + operationId: "putSourceSendpulse" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Reddit#update + x-speakeasy-entity-operation: Source_Sendpulse#update delete: tags: - "Sources" @@ -22944,10 +30598,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceReddit" + operationId: "deleteSourceSendpulse" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Reddit#delete + x-speakeasy-entity-operation: Source_Sendpulse#delete parameters: - name: "sourceId" schema: @@ -22955,13 +30609,13 @@ paths: type: "string" in: "path" required: true - /sources#Redshift: + /sources#Senseforce: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRedshiftCreateRequest" + $ref: "#/components/schemas/SourceSenseforceCreateRequest" tags: - "Sources" responses: @@ -22975,14 +30629,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRedshift" + operationId: "createSourceSenseforce" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Redshift#create - /sources/{sourceId}#Redshift: + x-speakeasy-entity-operation: Source_Senseforce#create + /sources/{sourceId}#Senseforce: get: tags: - "Sources" @@ -22997,10 +30651,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRedshift" + operationId: "getSourceSenseforce" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Redshift#read + x-speakeasy-entity-operation: Source_Senseforce#read put: tags: - "Sources" @@ -23008,7 +30662,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRedshiftPutRequest" + $ref: "#/components/schemas/SourceSenseforcePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23016,10 +30670,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRedshift" + operationId: "putSourceSenseforce" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Redshift#update + x-speakeasy-entity-operation: Source_Senseforce#update delete: tags: - "Sources" @@ -23030,10 +30684,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRedshift" + operationId: "deleteSourceSenseforce" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Redshift#delete + x-speakeasy-entity-operation: Source_Senseforce#delete parameters: - name: "sourceId" schema: @@ -23041,13 +30695,13 @@ paths: type: "string" in: "path" required: true - /sources#Referralhero: + /sources#Sentry: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceReferralheroCreateRequest" + $ref: "#/components/schemas/SourceSentryCreateRequest" tags: - "Sources" responses: @@ -23061,14 +30715,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceReferralhero" + operationId: "createSourceSentry" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Referralhero#create - /sources/{sourceId}#Referralhero: + x-speakeasy-entity-operation: Source_Sentry#create + /sources/{sourceId}#Sentry: get: tags: - "Sources" @@ -23083,10 +30737,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceReferralhero" + operationId: "getSourceSentry" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Referralhero#read + x-speakeasy-entity-operation: Source_Sentry#read put: tags: - "Sources" @@ -23094,7 +30748,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceReferralheroPutRequest" + $ref: "#/components/schemas/SourceSentryPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23102,10 +30756,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceReferralhero" + operationId: "putSourceSentry" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Referralhero#update + x-speakeasy-entity-operation: Source_Sentry#update delete: tags: - "Sources" @@ -23116,10 +30770,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceReferralhero" + operationId: "deleteSourceSentry" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Referralhero#delete + x-speakeasy-entity-operation: Source_Sentry#delete parameters: - name: "sourceId" schema: @@ -23127,13 +30781,13 @@ paths: type: "string" in: "path" required: true - /sources#Rentcast: + /sources#Sftp: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRentcastCreateRequest" + $ref: "#/components/schemas/SourceSftpCreateRequest" tags: - "Sources" responses: @@ -23147,14 +30801,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRentcast" + operationId: "createSourceSftp" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rentcast#create - /sources/{sourceId}#Rentcast: + x-speakeasy-entity-operation: Source_Sftp#create + /sources/{sourceId}#Sftp: get: tags: - "Sources" @@ -23169,10 +30823,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRentcast" + operationId: "getSourceSftp" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rentcast#read + x-speakeasy-entity-operation: Source_Sftp#read put: tags: - "Sources" @@ -23180,7 +30834,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRentcastPutRequest" + $ref: "#/components/schemas/SourceSftpPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23188,10 +30842,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRentcast" + operationId: "putSourceSftp" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rentcast#update + x-speakeasy-entity-operation: Source_Sftp#update delete: tags: - "Sources" @@ -23202,10 +30856,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRentcast" + operationId: "deleteSourceSftp" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rentcast#delete + x-speakeasy-entity-operation: Source_Sftp#delete parameters: - name: "sourceId" schema: @@ -23213,13 +30867,13 @@ paths: type: "string" in: "path" required: true - /sources#ReplyIo: + /sources#SftpBulk: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceReplyIoCreateRequest" + $ref: "#/components/schemas/SourceSftpBulkCreateRequest" tags: - "Sources" responses: @@ -23233,14 +30887,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceReplyIo" + operationId: "createSourceSftpBulk" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ReplyIo#create - /sources/{sourceId}#ReplyIo: + x-speakeasy-entity-operation: Source_SftpBulk#create + /sources/{sourceId}#SftpBulk: get: tags: - "Sources" @@ -23255,10 +30909,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceReplyIo" + operationId: "getSourceSftpBulk" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ReplyIo#read + x-speakeasy-entity-operation: Source_SftpBulk#read put: tags: - "Sources" @@ -23266,7 +30920,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceReplyIoPutRequest" + $ref: "#/components/schemas/SourceSftpBulkPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23274,10 +30928,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceReplyIo" + operationId: "putSourceSftpBulk" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ReplyIo#update + x-speakeasy-entity-operation: Source_SftpBulk#update delete: tags: - "Sources" @@ -23288,10 +30942,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceReplyIo" + operationId: "deleteSourceSftpBulk" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ReplyIo#delete + x-speakeasy-entity-operation: Source_SftpBulk#delete parameters: - name: "sourceId" schema: @@ -23299,13 +30953,13 @@ paths: type: "string" in: "path" required: true - /sources#Retently: + /sources#Sharetribe: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRetentlyCreateRequest" + $ref: "#/components/schemas/SourceSharetribeCreateRequest" tags: - "Sources" responses: @@ -23319,14 +30973,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRetently" + operationId: "createSourceSharetribe" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Retently#create - /sources/{sourceId}#Retently: + x-speakeasy-entity-operation: Source_Sharetribe#create + /sources/{sourceId}#Sharetribe: get: tags: - "Sources" @@ -23341,10 +30995,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRetently" + operationId: "getSourceSharetribe" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Retently#read + x-speakeasy-entity-operation: Source_Sharetribe#read put: tags: - "Sources" @@ -23352,7 +31006,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRetentlyPutRequest" + $ref: "#/components/schemas/SourceSharetribePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23360,10 +31014,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRetently" + operationId: "putSourceSharetribe" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Retently#update + x-speakeasy-entity-operation: Source_Sharetribe#update delete: tags: - "Sources" @@ -23374,10 +31028,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRetently" + operationId: "deleteSourceSharetribe" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Retently#delete + x-speakeasy-entity-operation: Source_Sharetribe#delete parameters: - name: "sourceId" schema: @@ -23385,13 +31039,13 @@ paths: type: "string" in: "path" required: true - /sources#Revenuecat: + /sources#Shippo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRevenuecatCreateRequest" + $ref: "#/components/schemas/SourceShippoCreateRequest" tags: - "Sources" responses: @@ -23405,14 +31059,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRevenuecat" + operationId: "createSourceShippo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Revenuecat#create - /sources/{sourceId}#Revenuecat: + x-speakeasy-entity-operation: Source_Shippo#create + /sources/{sourceId}#Shippo: get: tags: - "Sources" @@ -23427,10 +31081,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRevenuecat" + operationId: "getSourceShippo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Revenuecat#read + x-speakeasy-entity-operation: Source_Shippo#read put: tags: - "Sources" @@ -23438,7 +31092,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRevenuecatPutRequest" + $ref: "#/components/schemas/SourceShippoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23446,10 +31100,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRevenuecat" + operationId: "putSourceShippo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Revenuecat#update + x-speakeasy-entity-operation: Source_Shippo#update delete: tags: - "Sources" @@ -23460,10 +31114,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRevenuecat" + operationId: "deleteSourceShippo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Revenuecat#delete + x-speakeasy-entity-operation: Source_Shippo#delete parameters: - name: "sourceId" schema: @@ -23471,13 +31125,13 @@ paths: type: "string" in: "path" required: true - /sources#RkiCovid: + /sources#Shopify: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRkiCovidCreateRequest" + $ref: "#/components/schemas/SourceShopifyCreateRequest" tags: - "Sources" responses: @@ -23491,14 +31145,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRkiCovid" + operationId: "createSourceShopify" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_RkiCovid#create - /sources/{sourceId}#RkiCovid: + x-speakeasy-entity-operation: Source_Shopify#create + /sources/{sourceId}#Shopify: get: tags: - "Sources" @@ -23513,10 +31167,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRkiCovid" + operationId: "getSourceShopify" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_RkiCovid#read + x-speakeasy-entity-operation: Source_Shopify#read put: tags: - "Sources" @@ -23524,7 +31178,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRkiCovidPutRequest" + $ref: "#/components/schemas/SourceShopifyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23532,10 +31186,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRkiCovid" + operationId: "putSourceShopify" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_RkiCovid#update + x-speakeasy-entity-operation: Source_Shopify#update delete: tags: - "Sources" @@ -23546,10 +31200,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRkiCovid" + operationId: "deleteSourceShopify" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_RkiCovid#delete + x-speakeasy-entity-operation: Source_Shopify#delete parameters: - name: "sourceId" schema: @@ -23557,13 +31211,13 @@ paths: type: "string" in: "path" required: true - /sources#Rollbar: + /sources#Shortcut: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRollbarCreateRequest" + $ref: "#/components/schemas/SourceShortcutCreateRequest" tags: - "Sources" responses: @@ -23577,14 +31231,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRollbar" + operationId: "createSourceShortcut" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rollbar#create - /sources/{sourceId}#Rollbar: + x-speakeasy-entity-operation: Source_Shortcut#create + /sources/{sourceId}#Shortcut: get: tags: - "Sources" @@ -23599,10 +31253,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRollbar" + operationId: "getSourceShortcut" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rollbar#read + x-speakeasy-entity-operation: Source_Shortcut#read put: tags: - "Sources" @@ -23610,7 +31264,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRollbarPutRequest" + $ref: "#/components/schemas/SourceShortcutPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23618,10 +31272,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRollbar" + operationId: "putSourceShortcut" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rollbar#update + x-speakeasy-entity-operation: Source_Shortcut#update delete: tags: - "Sources" @@ -23632,10 +31286,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRollbar" + operationId: "deleteSourceShortcut" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rollbar#delete + x-speakeasy-entity-operation: Source_Shortcut#delete parameters: - name: "sourceId" schema: @@ -23643,13 +31297,13 @@ paths: type: "string" in: "path" required: true - /sources#Rootly: + /sources#Shortio: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRootlyCreateRequest" + $ref: "#/components/schemas/SourceShortioCreateRequest" tags: - "Sources" responses: @@ -23663,14 +31317,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRootly" + operationId: "createSourceShortio" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rootly#create - /sources/{sourceId}#Rootly: + x-speakeasy-entity-operation: Source_Shortio#create + /sources/{sourceId}#Shortio: get: tags: - "Sources" @@ -23685,10 +31339,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRootly" + operationId: "getSourceShortio" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rootly#read + x-speakeasy-entity-operation: Source_Shortio#read put: tags: - "Sources" @@ -23696,7 +31350,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRootlyPutRequest" + $ref: "#/components/schemas/SourceShortioPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23704,10 +31358,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRootly" + operationId: "putSourceShortio" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rootly#update + x-speakeasy-entity-operation: Source_Shortio#update delete: tags: - "Sources" @@ -23718,10 +31372,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRootly" + operationId: "deleteSourceShortio" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rootly#delete + x-speakeasy-entity-operation: Source_Shortio#delete parameters: - name: "sourceId" schema: @@ -23729,13 +31383,13 @@ paths: type: "string" in: "path" required: true - /sources#Rss: + /sources#SigmaComputing: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceRssCreateRequest" + $ref: "#/components/schemas/SourceSigmaComputingCreateRequest" tags: - "Sources" responses: @@ -23749,14 +31403,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceRss" + operationId: "createSourceSigmaComputing" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rss#create - /sources/{sourceId}#Rss: + x-speakeasy-entity-operation: Source_SigmaComputing#create + /sources/{sourceId}#SigmaComputing: get: tags: - "Sources" @@ -23771,10 +31425,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceRss" + operationId: "getSourceSigmaComputing" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rss#read + x-speakeasy-entity-operation: Source_SigmaComputing#read put: tags: - "Sources" @@ -23782,7 +31436,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceRssPutRequest" + $ref: "#/components/schemas/SourceSigmaComputingPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23790,10 +31444,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceRss" + operationId: "putSourceSigmaComputing" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rss#update + x-speakeasy-entity-operation: Source_SigmaComputing#update delete: tags: - "Sources" @@ -23804,10 +31458,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceRss" + operationId: "deleteSourceSigmaComputing" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Rss#delete + x-speakeasy-entity-operation: Source_SigmaComputing#delete parameters: - name: "sourceId" schema: @@ -23815,13 +31469,13 @@ paths: type: "string" in: "path" required: true - /sources#S3: + /sources#Simfin: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceS3CreateRequest" + $ref: "#/components/schemas/SourceSimfinCreateRequest" tags: - "Sources" responses: @@ -23835,14 +31489,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceS3" + operationId: "createSourceSimfin" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_S3#create - /sources/{sourceId}#S3: + x-speakeasy-entity-operation: Source_Simfin#create + /sources/{sourceId}#Simfin: get: tags: - "Sources" @@ -23857,10 +31511,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceS3" + operationId: "getSourceSimfin" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_S3#read + x-speakeasy-entity-operation: Source_Simfin#read put: tags: - "Sources" @@ -23868,7 +31522,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceS3PutRequest" + $ref: "#/components/schemas/SourceSimfinPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23876,10 +31530,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceS3" + operationId: "putSourceSimfin" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_S3#update + x-speakeasy-entity-operation: Source_Simfin#update delete: tags: - "Sources" @@ -23890,10 +31544,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceS3" + operationId: "deleteSourceSimfin" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_S3#delete + x-speakeasy-entity-operation: Source_Simfin#delete parameters: - name: "sourceId" schema: @@ -23901,13 +31555,13 @@ paths: type: "string" in: "path" required: true - /sources#Safetyculture: + /sources#Simplecast: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSafetycultureCreateRequest" + $ref: "#/components/schemas/SourceSimplecastCreateRequest" tags: - "Sources" responses: @@ -23921,14 +31575,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSafetyculture" + operationId: "createSourceSimplecast" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Safetyculture#create - /sources/{sourceId}#Safetyculture: + x-speakeasy-entity-operation: Source_Simplecast#create + /sources/{sourceId}#Simplecast: get: tags: - "Sources" @@ -23943,10 +31597,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSafetyculture" + operationId: "getSourceSimplecast" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Safetyculture#read + x-speakeasy-entity-operation: Source_Simplecast#read put: tags: - "Sources" @@ -23954,7 +31608,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSafetyculturePutRequest" + $ref: "#/components/schemas/SourceSimplecastPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -23962,10 +31616,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSafetyculture" + operationId: "putSourceSimplecast" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Safetyculture#update + x-speakeasy-entity-operation: Source_Simplecast#update delete: tags: - "Sources" @@ -23976,10 +31630,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSafetyculture" + operationId: "deleteSourceSimplecast" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Safetyculture#delete + x-speakeasy-entity-operation: Source_Simplecast#delete parameters: - name: "sourceId" schema: @@ -23987,13 +31641,13 @@ paths: type: "string" in: "path" required: true - /sources#SageHr: + /sources#Simplesat: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSageHrCreateRequest" + $ref: "#/components/schemas/SourceSimplesatCreateRequest" tags: - "Sources" responses: @@ -24007,14 +31661,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSageHr" + operationId: "createSourceSimplesat" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SageHr#create - /sources/{sourceId}#SageHr: + x-speakeasy-entity-operation: Source_Simplesat#create + /sources/{sourceId}#Simplesat: get: tags: - "Sources" @@ -24029,10 +31683,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSageHr" + operationId: "getSourceSimplesat" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SageHr#read + x-speakeasy-entity-operation: Source_Simplesat#read put: tags: - "Sources" @@ -24040,7 +31694,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSageHrPutRequest" + $ref: "#/components/schemas/SourceSimplesatPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24048,10 +31702,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSageHr" + operationId: "putSourceSimplesat" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SageHr#update + x-speakeasy-entity-operation: Source_Simplesat#update delete: tags: - "Sources" @@ -24062,10 +31716,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSageHr" + operationId: "deleteSourceSimplesat" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SageHr#delete + x-speakeasy-entity-operation: Source_Simplesat#delete parameters: - name: "sourceId" schema: @@ -24073,13 +31727,13 @@ paths: type: "string" in: "path" required: true - /sources#Salesforce: + /sources#Slack: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSalesforceCreateRequest" + $ref: "#/components/schemas/SourceSlackCreateRequest" tags: - "Sources" responses: @@ -24093,14 +31747,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSalesforce" + operationId: "createSourceSlack" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Salesforce#create - /sources/{sourceId}#Salesforce: + x-speakeasy-entity-operation: Source_Slack#create + /sources/{sourceId}#Slack: get: tags: - "Sources" @@ -24115,10 +31769,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSalesforce" + operationId: "getSourceSlack" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Salesforce#read + x-speakeasy-entity-operation: Source_Slack#read put: tags: - "Sources" @@ -24126,7 +31780,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSalesforcePutRequest" + $ref: "#/components/schemas/SourceSlackPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24134,10 +31788,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSalesforce" + operationId: "putSourceSlack" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Salesforce#update + x-speakeasy-entity-operation: Source_Slack#update delete: tags: - "Sources" @@ -24148,10 +31802,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSalesforce" + operationId: "deleteSourceSlack" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Salesforce#delete + x-speakeasy-entity-operation: Source_Slack#delete parameters: - name: "sourceId" schema: @@ -24159,13 +31813,13 @@ paths: type: "string" in: "path" required: true - /sources#Salesloft: + /sources#Smaily: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSalesloftCreateRequest" + $ref: "#/components/schemas/SourceSmailyCreateRequest" tags: - "Sources" responses: @@ -24179,14 +31833,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSalesloft" + operationId: "createSourceSmaily" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Salesloft#create - /sources/{sourceId}#Salesloft: + x-speakeasy-entity-operation: Source_Smaily#create + /sources/{sourceId}#Smaily: get: tags: - "Sources" @@ -24201,10 +31855,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSalesloft" + operationId: "getSourceSmaily" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Salesloft#read + x-speakeasy-entity-operation: Source_Smaily#read put: tags: - "Sources" @@ -24212,7 +31866,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSalesloftPutRequest" + $ref: "#/components/schemas/SourceSmailyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24220,10 +31874,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSalesloft" + operationId: "putSourceSmaily" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Salesloft#update + x-speakeasy-entity-operation: Source_Smaily#update delete: tags: - "Sources" @@ -24234,10 +31888,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSalesloft" + operationId: "deleteSourceSmaily" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Salesloft#delete + x-speakeasy-entity-operation: Source_Smaily#delete parameters: - name: "sourceId" schema: @@ -24245,13 +31899,13 @@ paths: type: "string" in: "path" required: true - /sources#SapFieldglass: + /sources#Smartengage: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSapFieldglassCreateRequest" + $ref: "#/components/schemas/SourceSmartengageCreateRequest" tags: - "Sources" responses: @@ -24265,14 +31919,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSapFieldglass" + operationId: "createSourceSmartengage" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SapFieldglass#create - /sources/{sourceId}#SapFieldglass: + x-speakeasy-entity-operation: Source_Smartengage#create + /sources/{sourceId}#Smartengage: get: tags: - "Sources" @@ -24287,10 +31941,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSapFieldglass" + operationId: "getSourceSmartengage" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SapFieldglass#read + x-speakeasy-entity-operation: Source_Smartengage#read put: tags: - "Sources" @@ -24298,7 +31952,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSapFieldglassPutRequest" + $ref: "#/components/schemas/SourceSmartengagePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24306,10 +31960,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSapFieldglass" + operationId: "putSourceSmartengage" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SapFieldglass#update + x-speakeasy-entity-operation: Source_Smartengage#update delete: tags: - "Sources" @@ -24320,10 +31974,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSapFieldglass" + operationId: "deleteSourceSmartengage" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SapFieldglass#delete + x-speakeasy-entity-operation: Source_Smartengage#delete parameters: - name: "sourceId" schema: @@ -24331,13 +31985,13 @@ paths: type: "string" in: "path" required: true - /sources#Savvycal: + /sources#Smartreach: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSavvycalCreateRequest" + $ref: "#/components/schemas/SourceSmartreachCreateRequest" tags: - "Sources" responses: @@ -24351,14 +32005,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSavvycal" + operationId: "createSourceSmartreach" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Savvycal#create - /sources/{sourceId}#Savvycal: + x-speakeasy-entity-operation: Source_Smartreach#create + /sources/{sourceId}#Smartreach: get: tags: - "Sources" @@ -24373,10 +32027,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSavvycal" + operationId: "getSourceSmartreach" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Savvycal#read + x-speakeasy-entity-operation: Source_Smartreach#read put: tags: - "Sources" @@ -24384,7 +32038,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSavvycalPutRequest" + $ref: "#/components/schemas/SourceSmartreachPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24392,10 +32046,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSavvycal" + operationId: "putSourceSmartreach" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Savvycal#update + x-speakeasy-entity-operation: Source_Smartreach#update delete: tags: - "Sources" @@ -24406,10 +32060,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSavvycal" + operationId: "deleteSourceSmartreach" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Savvycal#delete + x-speakeasy-entity-operation: Source_Smartreach#delete parameters: - name: "sourceId" schema: @@ -24417,13 +32071,13 @@ paths: type: "string" in: "path" required: true - /sources#Scryfall: + /sources#Smartsheets: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceScryfallCreateRequest" + $ref: "#/components/schemas/SourceSmartsheetsCreateRequest" tags: - "Sources" responses: @@ -24437,14 +32091,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceScryfall" + operationId: "createSourceSmartsheets" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Scryfall#create - /sources/{sourceId}#Scryfall: + x-speakeasy-entity-operation: Source_Smartsheets#create + /sources/{sourceId}#Smartsheets: get: tags: - "Sources" @@ -24459,10 +32113,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceScryfall" + operationId: "getSourceSmartsheets" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Scryfall#read + x-speakeasy-entity-operation: Source_Smartsheets#read put: tags: - "Sources" @@ -24470,7 +32124,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceScryfallPutRequest" + $ref: "#/components/schemas/SourceSmartsheetsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24478,10 +32132,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceScryfall" + operationId: "putSourceSmartsheets" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Scryfall#update + x-speakeasy-entity-operation: Source_Smartsheets#update delete: tags: - "Sources" @@ -24492,10 +32146,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceScryfall" + operationId: "deleteSourceSmartsheets" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Scryfall#delete + x-speakeasy-entity-operation: Source_Smartsheets#delete parameters: - name: "sourceId" schema: @@ -24503,13 +32157,13 @@ paths: type: "string" in: "path" required: true - /sources#Secoda: + /sources#Smartwaiver: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSecodaCreateRequest" + $ref: "#/components/schemas/SourceSmartwaiverCreateRequest" tags: - "Sources" responses: @@ -24523,14 +32177,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSecoda" + operationId: "createSourceSmartwaiver" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Secoda#create - /sources/{sourceId}#Secoda: + x-speakeasy-entity-operation: Source_Smartwaiver#create + /sources/{sourceId}#Smartwaiver: get: tags: - "Sources" @@ -24545,10 +32199,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSecoda" + operationId: "getSourceSmartwaiver" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Secoda#read + x-speakeasy-entity-operation: Source_Smartwaiver#read put: tags: - "Sources" @@ -24556,7 +32210,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSecodaPutRequest" + $ref: "#/components/schemas/SourceSmartwaiverPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24564,10 +32218,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSecoda" + operationId: "putSourceSmartwaiver" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Secoda#update + x-speakeasy-entity-operation: Source_Smartwaiver#update delete: tags: - "Sources" @@ -24578,10 +32232,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSecoda" + operationId: "deleteSourceSmartwaiver" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Secoda#delete + x-speakeasy-entity-operation: Source_Smartwaiver#delete parameters: - name: "sourceId" schema: @@ -24589,13 +32243,13 @@ paths: type: "string" in: "path" required: true - /sources#Segment: + /sources#SnapchatMarketing: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSegmentCreateRequest" + $ref: "#/components/schemas/SourceSnapchatMarketingCreateRequest" tags: - "Sources" responses: @@ -24609,14 +32263,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSegment" + operationId: "createSourceSnapchatMarketing" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Segment#create - /sources/{sourceId}#Segment: + x-speakeasy-entity-operation: Source_SnapchatMarketing#create + /sources/{sourceId}#SnapchatMarketing: get: tags: - "Sources" @@ -24631,10 +32285,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSegment" + operationId: "getSourceSnapchatMarketing" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Segment#read + x-speakeasy-entity-operation: Source_SnapchatMarketing#read put: tags: - "Sources" @@ -24642,7 +32296,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSegmentPutRequest" + $ref: "#/components/schemas/SourceSnapchatMarketingPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24650,10 +32304,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSegment" + operationId: "putSourceSnapchatMarketing" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Segment#update + x-speakeasy-entity-operation: Source_SnapchatMarketing#update delete: tags: - "Sources" @@ -24664,10 +32318,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSegment" + operationId: "deleteSourceSnapchatMarketing" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Segment#delete + x-speakeasy-entity-operation: Source_SnapchatMarketing#delete parameters: - name: "sourceId" schema: @@ -24675,13 +32329,13 @@ paths: type: "string" in: "path" required: true - /sources#Sendgrid: + /sources#Snowflake: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSendgridCreateRequest" + $ref: "#/components/schemas/SourceSnowflakeCreateRequest" tags: - "Sources" responses: @@ -24695,14 +32349,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSendgrid" + operationId: "createSourceSnowflake" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sendgrid#create - /sources/{sourceId}#Sendgrid: + x-speakeasy-entity-operation: Source_Snowflake#create + /sources/{sourceId}#Snowflake: get: tags: - "Sources" @@ -24717,10 +32371,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSendgrid" + operationId: "getSourceSnowflake" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sendgrid#read + x-speakeasy-entity-operation: Source_Snowflake#read put: tags: - "Sources" @@ -24728,7 +32382,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSendgridPutRequest" + $ref: "#/components/schemas/SourceSnowflakePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24736,10 +32390,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSendgrid" + operationId: "putSourceSnowflake" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sendgrid#update + x-speakeasy-entity-operation: Source_Snowflake#update delete: tags: - "Sources" @@ -24750,10 +32404,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSendgrid" + operationId: "deleteSourceSnowflake" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sendgrid#delete + x-speakeasy-entity-operation: Source_Snowflake#delete parameters: - name: "sourceId" schema: @@ -24761,13 +32415,13 @@ paths: type: "string" in: "path" required: true - /sources#Sendinblue: + /sources#SolarwindsServiceDesk: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSendinblueCreateRequest" + $ref: "#/components/schemas/SourceSolarwindsServiceDeskCreateRequest" tags: - "Sources" responses: @@ -24781,14 +32435,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSendinblue" + operationId: "createSourceSolarwindsServiceDesk" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sendinblue#create - /sources/{sourceId}#Sendinblue: + x-speakeasy-entity-operation: Source_SolarwindsServiceDesk#create + /sources/{sourceId}#SolarwindsServiceDesk: get: tags: - "Sources" @@ -24803,10 +32457,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSendinblue" + operationId: "getSourceSolarwindsServiceDesk" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sendinblue#read + x-speakeasy-entity-operation: Source_SolarwindsServiceDesk#read put: tags: - "Sources" @@ -24814,7 +32468,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSendinbluePutRequest" + $ref: "#/components/schemas/SourceSolarwindsServiceDeskPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24822,10 +32476,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSendinblue" + operationId: "putSourceSolarwindsServiceDesk" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sendinblue#update + x-speakeasy-entity-operation: Source_SolarwindsServiceDesk#update delete: tags: - "Sources" @@ -24836,10 +32490,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSendinblue" + operationId: "deleteSourceSolarwindsServiceDesk" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sendinblue#delete + x-speakeasy-entity-operation: Source_SolarwindsServiceDesk#delete parameters: - name: "sourceId" schema: @@ -24847,13 +32501,13 @@ paths: type: "string" in: "path" required: true - /sources#Senseforce: + /sources#SonarCloud: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSenseforceCreateRequest" + $ref: "#/components/schemas/SourceSonarCloudCreateRequest" tags: - "Sources" responses: @@ -24867,14 +32521,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSenseforce" + operationId: "createSourceSonarCloud" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Senseforce#create - /sources/{sourceId}#Senseforce: + x-speakeasy-entity-operation: Source_SonarCloud#create + /sources/{sourceId}#SonarCloud: get: tags: - "Sources" @@ -24889,10 +32543,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSenseforce" + operationId: "getSourceSonarCloud" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Senseforce#read + x-speakeasy-entity-operation: Source_SonarCloud#read put: tags: - "Sources" @@ -24900,7 +32554,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSenseforcePutRequest" + $ref: "#/components/schemas/SourceSonarCloudPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24908,10 +32562,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSenseforce" + operationId: "putSourceSonarCloud" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Senseforce#update + x-speakeasy-entity-operation: Source_SonarCloud#update delete: tags: - "Sources" @@ -24922,10 +32576,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSenseforce" + operationId: "deleteSourceSonarCloud" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Senseforce#delete + x-speakeasy-entity-operation: Source_SonarCloud#delete parameters: - name: "sourceId" schema: @@ -24933,13 +32587,13 @@ paths: type: "string" in: "path" required: true - /sources#Sentry: + /sources#SpacexApi: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSentryCreateRequest" + $ref: "#/components/schemas/SourceSpacexApiCreateRequest" tags: - "Sources" responses: @@ -24953,14 +32607,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSentry" + operationId: "createSourceSpacexApi" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sentry#create - /sources/{sourceId}#Sentry: + x-speakeasy-entity-operation: Source_SpacexApi#create + /sources/{sourceId}#SpacexApi: get: tags: - "Sources" @@ -24975,10 +32629,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSentry" + operationId: "getSourceSpacexApi" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sentry#read + x-speakeasy-entity-operation: Source_SpacexApi#read put: tags: - "Sources" @@ -24986,7 +32640,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSentryPutRequest" + $ref: "#/components/schemas/SourceSpacexApiPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -24994,10 +32648,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSentry" + operationId: "putSourceSpacexApi" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sentry#update + x-speakeasy-entity-operation: Source_SpacexApi#update delete: tags: - "Sources" @@ -25008,10 +32662,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSentry" + operationId: "deleteSourceSpacexApi" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sentry#delete + x-speakeasy-entity-operation: Source_SpacexApi#delete parameters: - name: "sourceId" schema: @@ -25019,13 +32673,13 @@ paths: type: "string" in: "path" required: true - /sources#Sftp: + /sources#Sparkpost: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSftpCreateRequest" + $ref: "#/components/schemas/SourceSparkpostCreateRequest" tags: - "Sources" responses: @@ -25039,14 +32693,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSftp" + operationId: "createSourceSparkpost" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sftp#create - /sources/{sourceId}#Sftp: + x-speakeasy-entity-operation: Source_Sparkpost#create + /sources/{sourceId}#Sparkpost: get: tags: - "Sources" @@ -25061,10 +32715,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSftp" + operationId: "getSourceSparkpost" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sftp#read + x-speakeasy-entity-operation: Source_Sparkpost#read put: tags: - "Sources" @@ -25072,7 +32726,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSftpPutRequest" + $ref: "#/components/schemas/SourceSparkpostPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25080,10 +32734,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSftp" + operationId: "putSourceSparkpost" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sftp#update + x-speakeasy-entity-operation: Source_Sparkpost#update delete: tags: - "Sources" @@ -25094,10 +32748,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSftp" + operationId: "deleteSourceSparkpost" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sftp#delete + x-speakeasy-entity-operation: Source_Sparkpost#delete parameters: - name: "sourceId" schema: @@ -25105,13 +32759,13 @@ paths: type: "string" in: "path" required: true - /sources#SftpBulk: + /sources#SplitIo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSftpBulkCreateRequest" + $ref: "#/components/schemas/SourceSplitIoCreateRequest" tags: - "Sources" responses: @@ -25125,14 +32779,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSftpBulk" + operationId: "createSourceSplitIo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SftpBulk#create - /sources/{sourceId}#SftpBulk: + x-speakeasy-entity-operation: Source_SplitIo#create + /sources/{sourceId}#SplitIo: get: tags: - "Sources" @@ -25147,10 +32801,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSftpBulk" + operationId: "getSourceSplitIo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SftpBulk#read + x-speakeasy-entity-operation: Source_SplitIo#read put: tags: - "Sources" @@ -25158,7 +32812,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSftpBulkPutRequest" + $ref: "#/components/schemas/SourceSplitIoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25166,10 +32820,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSftpBulk" + operationId: "putSourceSplitIo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SftpBulk#update + x-speakeasy-entity-operation: Source_SplitIo#update delete: tags: - "Sources" @@ -25180,10 +32834,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSftpBulk" + operationId: "deleteSourceSplitIo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SftpBulk#delete + x-speakeasy-entity-operation: Source_SplitIo#delete parameters: - name: "sourceId" schema: @@ -25191,13 +32845,13 @@ paths: type: "string" in: "path" required: true - /sources#Sharetribe: + /sources#Spotlercrm: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSharetribeCreateRequest" + $ref: "#/components/schemas/SourceSpotlercrmCreateRequest" tags: - "Sources" responses: @@ -25211,14 +32865,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSharetribe" + operationId: "createSourceSpotlercrm" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sharetribe#create - /sources/{sourceId}#Sharetribe: + x-speakeasy-entity-operation: Source_Spotlercrm#create + /sources/{sourceId}#Spotlercrm: get: tags: - "Sources" @@ -25233,10 +32887,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSharetribe" + operationId: "getSourceSpotlercrm" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sharetribe#read + x-speakeasy-entity-operation: Source_Spotlercrm#read put: tags: - "Sources" @@ -25244,7 +32898,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSharetribePutRequest" + $ref: "#/components/schemas/SourceSpotlercrmPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25252,10 +32906,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSharetribe" + operationId: "putSourceSpotlercrm" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sharetribe#update + x-speakeasy-entity-operation: Source_Spotlercrm#update delete: tags: - "Sources" @@ -25266,10 +32920,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSharetribe" + operationId: "deleteSourceSpotlercrm" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sharetribe#delete + x-speakeasy-entity-operation: Source_Spotlercrm#delete parameters: - name: "sourceId" schema: @@ -25277,13 +32931,13 @@ paths: type: "string" in: "path" required: true - /sources#Shopify: + /sources#Square: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceShopifyCreateRequest" + $ref: "#/components/schemas/SourceSquareCreateRequest" tags: - "Sources" responses: @@ -25297,14 +32951,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceShopify" + operationId: "createSourceSquare" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shopify#create - /sources/{sourceId}#Shopify: + x-speakeasy-entity-operation: Source_Square#create + /sources/{sourceId}#Square: get: tags: - "Sources" @@ -25319,10 +32973,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceShopify" + operationId: "getSourceSquare" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shopify#read + x-speakeasy-entity-operation: Source_Square#read put: tags: - "Sources" @@ -25330,7 +32984,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceShopifyPutRequest" + $ref: "#/components/schemas/SourceSquarePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25338,10 +32992,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceShopify" + operationId: "putSourceSquare" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shopify#update + x-speakeasy-entity-operation: Source_Square#update delete: tags: - "Sources" @@ -25352,10 +33006,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceShopify" + operationId: "deleteSourceSquare" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shopify#delete + x-speakeasy-entity-operation: Source_Square#delete parameters: - name: "sourceId" schema: @@ -25363,13 +33017,13 @@ paths: type: "string" in: "path" required: true - /sources#Shortcut: + /sources#Squarespace: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceShortcutCreateRequest" + $ref: "#/components/schemas/SourceSquarespaceCreateRequest" tags: - "Sources" responses: @@ -25383,14 +33037,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceShortcut" + operationId: "createSourceSquarespace" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shortcut#create - /sources/{sourceId}#Shortcut: + x-speakeasy-entity-operation: Source_Squarespace#create + /sources/{sourceId}#Squarespace: get: tags: - "Sources" @@ -25405,10 +33059,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceShortcut" + operationId: "getSourceSquarespace" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shortcut#read + x-speakeasy-entity-operation: Source_Squarespace#read put: tags: - "Sources" @@ -25416,7 +33070,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceShortcutPutRequest" + $ref: "#/components/schemas/SourceSquarespacePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25424,10 +33078,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceShortcut" + operationId: "putSourceSquarespace" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shortcut#update + x-speakeasy-entity-operation: Source_Squarespace#update delete: tags: - "Sources" @@ -25438,10 +33092,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceShortcut" + operationId: "deleteSourceSquarespace" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shortcut#delete + x-speakeasy-entity-operation: Source_Squarespace#delete parameters: - name: "sourceId" schema: @@ -25449,13 +33103,13 @@ paths: type: "string" in: "path" required: true - /sources#Shortio: + /sources#Statsig: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceShortioCreateRequest" + $ref: "#/components/schemas/SourceStatsigCreateRequest" tags: - "Sources" responses: @@ -25469,14 +33123,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceShortio" + operationId: "createSourceStatsig" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shortio#create - /sources/{sourceId}#Shortio: + x-speakeasy-entity-operation: Source_Statsig#create + /sources/{sourceId}#Statsig: get: tags: - "Sources" @@ -25491,10 +33145,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceShortio" + operationId: "getSourceStatsig" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shortio#read + x-speakeasy-entity-operation: Source_Statsig#read put: tags: - "Sources" @@ -25502,7 +33156,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceShortioPutRequest" + $ref: "#/components/schemas/SourceStatsigPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25510,10 +33164,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceShortio" + operationId: "putSourceStatsig" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shortio#update + x-speakeasy-entity-operation: Source_Statsig#update delete: tags: - "Sources" @@ -25524,10 +33178,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceShortio" + operationId: "deleteSourceStatsig" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Shortio#delete + x-speakeasy-entity-operation: Source_Statsig#delete parameters: - name: "sourceId" schema: @@ -25535,13 +33189,13 @@ paths: type: "string" in: "path" required: true - /sources#SigmaComputing: + /sources#Statuspage: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSigmaComputingCreateRequest" + $ref: "#/components/schemas/SourceStatuspageCreateRequest" tags: - "Sources" responses: @@ -25555,14 +33209,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSigmaComputing" + operationId: "createSourceStatuspage" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SigmaComputing#create - /sources/{sourceId}#SigmaComputing: + x-speakeasy-entity-operation: Source_Statuspage#create + /sources/{sourceId}#Statuspage: get: tags: - "Sources" @@ -25577,10 +33231,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSigmaComputing" + operationId: "getSourceStatuspage" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SigmaComputing#read + x-speakeasy-entity-operation: Source_Statuspage#read put: tags: - "Sources" @@ -25588,7 +33242,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSigmaComputingPutRequest" + $ref: "#/components/schemas/SourceStatuspagePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25596,10 +33250,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSigmaComputing" + operationId: "putSourceStatuspage" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SigmaComputing#update + x-speakeasy-entity-operation: Source_Statuspage#update delete: tags: - "Sources" @@ -25610,10 +33264,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSigmaComputing" + operationId: "deleteSourceStatuspage" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SigmaComputing#delete + x-speakeasy-entity-operation: Source_Statuspage#delete parameters: - name: "sourceId" schema: @@ -25621,13 +33275,13 @@ paths: type: "string" in: "path" required: true - /sources#Simplecast: + /sources#Stockdata: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSimplecastCreateRequest" + $ref: "#/components/schemas/SourceStockdataCreateRequest" tags: - "Sources" responses: @@ -25641,14 +33295,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSimplecast" + operationId: "createSourceStockdata" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Simplecast#create - /sources/{sourceId}#Simplecast: + x-speakeasy-entity-operation: Source_Stockdata#create + /sources/{sourceId}#Stockdata: get: tags: - "Sources" @@ -25663,10 +33317,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSimplecast" + operationId: "getSourceStockdata" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Simplecast#read + x-speakeasy-entity-operation: Source_Stockdata#read put: tags: - "Sources" @@ -25674,7 +33328,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSimplecastPutRequest" + $ref: "#/components/schemas/SourceStockdataPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25682,10 +33336,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSimplecast" + operationId: "putSourceStockdata" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Simplecast#update + x-speakeasy-entity-operation: Source_Stockdata#update delete: tags: - "Sources" @@ -25696,10 +33350,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSimplecast" + operationId: "deleteSourceStockdata" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Simplecast#delete + x-speakeasy-entity-operation: Source_Stockdata#delete parameters: - name: "sourceId" schema: @@ -25707,13 +33361,13 @@ paths: type: "string" in: "path" required: true - /sources#Simplesat: + /sources#Strava: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSimplesatCreateRequest" + $ref: "#/components/schemas/SourceStravaCreateRequest" tags: - "Sources" responses: @@ -25727,14 +33381,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSimplesat" + operationId: "createSourceStrava" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Simplesat#create - /sources/{sourceId}#Simplesat: + x-speakeasy-entity-operation: Source_Strava#create + /sources/{sourceId}#Strava: get: tags: - "Sources" @@ -25749,10 +33403,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSimplesat" + operationId: "getSourceStrava" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Simplesat#read + x-speakeasy-entity-operation: Source_Strava#read put: tags: - "Sources" @@ -25760,7 +33414,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSimplesatPutRequest" + $ref: "#/components/schemas/SourceStravaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25768,10 +33422,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSimplesat" + operationId: "putSourceStrava" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Simplesat#update + x-speakeasy-entity-operation: Source_Strava#update delete: tags: - "Sources" @@ -25782,10 +33436,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSimplesat" + operationId: "deleteSourceStrava" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Simplesat#delete + x-speakeasy-entity-operation: Source_Strava#delete parameters: - name: "sourceId" schema: @@ -25793,13 +33447,13 @@ paths: type: "string" in: "path" required: true - /sources#Slack: + /sources#Stripe: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSlackCreateRequest" + $ref: "#/components/schemas/SourceStripeCreateRequest" tags: - "Sources" responses: @@ -25813,14 +33467,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSlack" + operationId: "createSourceStripe" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Slack#create - /sources/{sourceId}#Slack: + x-speakeasy-entity-operation: Source_Stripe#create + /sources/{sourceId}#Stripe: get: tags: - "Sources" @@ -25835,10 +33489,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSlack" + operationId: "getSourceStripe" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Slack#read + x-speakeasy-entity-operation: Source_Stripe#read put: tags: - "Sources" @@ -25846,7 +33500,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSlackPutRequest" + $ref: "#/components/schemas/SourceStripePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25854,10 +33508,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSlack" + operationId: "putSourceStripe" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Slack#update + x-speakeasy-entity-operation: Source_Stripe#update delete: tags: - "Sources" @@ -25868,10 +33522,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSlack" + operationId: "deleteSourceStripe" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Slack#delete + x-speakeasy-entity-operation: Source_Stripe#delete parameters: - name: "sourceId" schema: @@ -25879,13 +33533,13 @@ paths: type: "string" in: "path" required: true - /sources#Smaily: + /sources#SurveySparrow: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSmailyCreateRequest" + $ref: "#/components/schemas/SourceSurveySparrowCreateRequest" tags: - "Sources" responses: @@ -25899,14 +33553,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSmaily" + operationId: "createSourceSurveySparrow" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smaily#create - /sources/{sourceId}#Smaily: + x-speakeasy-entity-operation: Source_SurveySparrow#create + /sources/{sourceId}#SurveySparrow: get: tags: - "Sources" @@ -25921,10 +33575,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSmaily" + operationId: "getSourceSurveySparrow" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smaily#read + x-speakeasy-entity-operation: Source_SurveySparrow#read put: tags: - "Sources" @@ -25932,7 +33586,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSmailyPutRequest" + $ref: "#/components/schemas/SourceSurveySparrowPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -25940,10 +33594,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSmaily" + operationId: "putSourceSurveySparrow" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smaily#update + x-speakeasy-entity-operation: Source_SurveySparrow#update delete: tags: - "Sources" @@ -25954,10 +33608,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSmaily" + operationId: "deleteSourceSurveySparrow" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smaily#delete + x-speakeasy-entity-operation: Source_SurveySparrow#delete parameters: - name: "sourceId" schema: @@ -25965,13 +33619,13 @@ paths: type: "string" in: "path" required: true - /sources#Smartengage: + /sources#Surveymonkey: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSmartengageCreateRequest" + $ref: "#/components/schemas/SourceSurveymonkeyCreateRequest" tags: - "Sources" responses: @@ -25985,14 +33639,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSmartengage" + operationId: "createSourceSurveymonkey" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartengage#create - /sources/{sourceId}#Smartengage: + x-speakeasy-entity-operation: Source_Surveymonkey#create + /sources/{sourceId}#Surveymonkey: get: tags: - "Sources" @@ -26007,10 +33661,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSmartengage" + operationId: "getSourceSurveymonkey" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartengage#read + x-speakeasy-entity-operation: Source_Surveymonkey#read put: tags: - "Sources" @@ -26018,7 +33672,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSmartengagePutRequest" + $ref: "#/components/schemas/SourceSurveymonkeyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26026,10 +33680,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSmartengage" + operationId: "putSourceSurveymonkey" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartengage#update + x-speakeasy-entity-operation: Source_Surveymonkey#update delete: tags: - "Sources" @@ -26040,10 +33694,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSmartengage" + operationId: "deleteSourceSurveymonkey" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartengage#delete + x-speakeasy-entity-operation: Source_Surveymonkey#delete parameters: - name: "sourceId" schema: @@ -26051,13 +33705,13 @@ paths: type: "string" in: "path" required: true - /sources#Smartsheets: + /sources#Survicate: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSmartsheetsCreateRequest" + $ref: "#/components/schemas/SourceSurvicateCreateRequest" tags: - "Sources" responses: @@ -26071,14 +33725,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSmartsheets" + operationId: "createSourceSurvicate" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartsheets#create - /sources/{sourceId}#Smartsheets: + x-speakeasy-entity-operation: Source_Survicate#create + /sources/{sourceId}#Survicate: get: tags: - "Sources" @@ -26093,10 +33747,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSmartsheets" + operationId: "getSourceSurvicate" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartsheets#read + x-speakeasy-entity-operation: Source_Survicate#read put: tags: - "Sources" @@ -26104,7 +33758,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSmartsheetsPutRequest" + $ref: "#/components/schemas/SourceSurvicatePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26112,10 +33766,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSmartsheets" + operationId: "putSourceSurvicate" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartsheets#update + x-speakeasy-entity-operation: Source_Survicate#update delete: tags: - "Sources" @@ -26126,10 +33780,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSmartsheets" + operationId: "deleteSourceSurvicate" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartsheets#delete + x-speakeasy-entity-operation: Source_Survicate#delete parameters: - name: "sourceId" schema: @@ -26137,13 +33791,13 @@ paths: type: "string" in: "path" required: true - /sources#Smartwaiver: + /sources#Systeme: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSmartwaiverCreateRequest" + $ref: "#/components/schemas/SourceSystemeCreateRequest" tags: - "Sources" responses: @@ -26157,14 +33811,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSmartwaiver" + operationId: "createSourceSysteme" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartwaiver#create - /sources/{sourceId}#Smartwaiver: + x-speakeasy-entity-operation: Source_Systeme#create + /sources/{sourceId}#Systeme: get: tags: - "Sources" @@ -26179,10 +33833,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSmartwaiver" + operationId: "getSourceSysteme" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartwaiver#read + x-speakeasy-entity-operation: Source_Systeme#read put: tags: - "Sources" @@ -26190,7 +33844,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSmartwaiverPutRequest" + $ref: "#/components/schemas/SourceSystemePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26198,10 +33852,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSmartwaiver" + operationId: "putSourceSysteme" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartwaiver#update + x-speakeasy-entity-operation: Source_Systeme#update delete: tags: - "Sources" @@ -26212,10 +33866,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSmartwaiver" + operationId: "deleteSourceSysteme" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Smartwaiver#delete + x-speakeasy-entity-operation: Source_Systeme#delete parameters: - name: "sourceId" schema: @@ -26223,13 +33877,13 @@ paths: type: "string" in: "path" required: true - /sources#SnapchatMarketing: + /sources#Taboola: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSnapchatMarketingCreateRequest" + $ref: "#/components/schemas/SourceTaboolaCreateRequest" tags: - "Sources" responses: @@ -26243,14 +33897,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSnapchatMarketing" + operationId: "createSourceTaboola" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SnapchatMarketing#create - /sources/{sourceId}#SnapchatMarketing: + x-speakeasy-entity-operation: Source_Taboola#create + /sources/{sourceId}#Taboola: get: tags: - "Sources" @@ -26265,10 +33919,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSnapchatMarketing" + operationId: "getSourceTaboola" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SnapchatMarketing#read + x-speakeasy-entity-operation: Source_Taboola#read put: tags: - "Sources" @@ -26276,7 +33930,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSnapchatMarketingPutRequest" + $ref: "#/components/schemas/SourceTaboolaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26284,10 +33938,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSnapchatMarketing" + operationId: "putSourceTaboola" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SnapchatMarketing#update + x-speakeasy-entity-operation: Source_Taboola#update delete: tags: - "Sources" @@ -26298,10 +33952,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSnapchatMarketing" + operationId: "deleteSourceTaboola" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SnapchatMarketing#delete + x-speakeasy-entity-operation: Source_Taboola#delete parameters: - name: "sourceId" schema: @@ -26309,13 +33963,13 @@ paths: type: "string" in: "path" required: true - /sources#Snowflake: + /sources#Teamtailor: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSnowflakeCreateRequest" + $ref: "#/components/schemas/SourceTeamtailorCreateRequest" tags: - "Sources" responses: @@ -26329,14 +33983,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSnowflake" + operationId: "createSourceTeamtailor" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Snowflake#create - /sources/{sourceId}#Snowflake: + x-speakeasy-entity-operation: Source_Teamtailor#create + /sources/{sourceId}#Teamtailor: get: tags: - "Sources" @@ -26351,10 +34005,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSnowflake" + operationId: "getSourceTeamtailor" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Snowflake#read + x-speakeasy-entity-operation: Source_Teamtailor#read put: tags: - "Sources" @@ -26362,7 +34016,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSnowflakePutRequest" + $ref: "#/components/schemas/SourceTeamtailorPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26370,10 +34024,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSnowflake" + operationId: "putSourceTeamtailor" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Snowflake#update + x-speakeasy-entity-operation: Source_Teamtailor#update delete: tags: - "Sources" @@ -26384,10 +34038,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSnowflake" + operationId: "deleteSourceTeamtailor" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Snowflake#delete + x-speakeasy-entity-operation: Source_Teamtailor#delete parameters: - name: "sourceId" schema: @@ -26395,13 +34049,13 @@ paths: type: "string" in: "path" required: true - /sources#SolarwindsServiceDesk: + /sources#Teamwork: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSolarwindsServiceDeskCreateRequest" + $ref: "#/components/schemas/SourceTeamworkCreateRequest" tags: - "Sources" responses: @@ -26415,14 +34069,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSolarwindsServiceDesk" + operationId: "createSourceTeamwork" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SolarwindsServiceDesk#create - /sources/{sourceId}#SolarwindsServiceDesk: + x-speakeasy-entity-operation: Source_Teamwork#create + /sources/{sourceId}#Teamwork: get: tags: - "Sources" @@ -26437,10 +34091,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSolarwindsServiceDesk" + operationId: "getSourceTeamwork" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SolarwindsServiceDesk#read + x-speakeasy-entity-operation: Source_Teamwork#read put: tags: - "Sources" @@ -26448,7 +34102,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSolarwindsServiceDeskPutRequest" + $ref: "#/components/schemas/SourceTeamworkPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26456,10 +34110,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSolarwindsServiceDesk" + operationId: "putSourceTeamwork" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SolarwindsServiceDesk#update + x-speakeasy-entity-operation: Source_Teamwork#update delete: tags: - "Sources" @@ -26470,10 +34124,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSolarwindsServiceDesk" + operationId: "deleteSourceTeamwork" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SolarwindsServiceDesk#delete + x-speakeasy-entity-operation: Source_Teamwork#delete parameters: - name: "sourceId" schema: @@ -26481,13 +34135,13 @@ paths: type: "string" in: "path" required: true - /sources#SonarCloud: + /sources#Tempo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSonarCloudCreateRequest" + $ref: "#/components/schemas/SourceTempoCreateRequest" tags: - "Sources" responses: @@ -26501,14 +34155,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSonarCloud" + operationId: "createSourceTempo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SonarCloud#create - /sources/{sourceId}#SonarCloud: + x-speakeasy-entity-operation: Source_Tempo#create + /sources/{sourceId}#Tempo: get: tags: - "Sources" @@ -26523,10 +34177,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSonarCloud" + operationId: "getSourceTempo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SonarCloud#read + x-speakeasy-entity-operation: Source_Tempo#read put: tags: - "Sources" @@ -26534,7 +34188,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSonarCloudPutRequest" + $ref: "#/components/schemas/SourceTempoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26542,10 +34196,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSonarCloud" + operationId: "putSourceTempo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SonarCloud#update + x-speakeasy-entity-operation: Source_Tempo#update delete: tags: - "Sources" @@ -26556,10 +34210,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSonarCloud" + operationId: "deleteSourceTempo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SonarCloud#delete + x-speakeasy-entity-operation: Source_Tempo#delete parameters: - name: "sourceId" schema: @@ -26567,13 +34221,13 @@ paths: type: "string" in: "path" required: true - /sources#SpacexApi: + /sources#Testrail: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSpacexApiCreateRequest" + $ref: "#/components/schemas/SourceTestrailCreateRequest" tags: - "Sources" responses: @@ -26587,14 +34241,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSpacexApi" + operationId: "createSourceTestrail" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SpacexApi#create - /sources/{sourceId}#SpacexApi: + x-speakeasy-entity-operation: Source_Testrail#create + /sources/{sourceId}#Testrail: get: tags: - "Sources" @@ -26609,10 +34263,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSpacexApi" + operationId: "getSourceTestrail" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SpacexApi#read + x-speakeasy-entity-operation: Source_Testrail#read put: tags: - "Sources" @@ -26620,7 +34274,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSpacexApiPutRequest" + $ref: "#/components/schemas/SourceTestrailPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26628,10 +34282,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSpacexApi" + operationId: "putSourceTestrail" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SpacexApi#update + x-speakeasy-entity-operation: Source_Testrail#update delete: tags: - "Sources" @@ -26642,10 +34296,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSpacexApi" + operationId: "deleteSourceTestrail" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SpacexApi#delete + x-speakeasy-entity-operation: Source_Testrail#delete parameters: - name: "sourceId" schema: @@ -26653,13 +34307,13 @@ paths: type: "string" in: "path" required: true - /sources#Sparkpost: + /sources#TheGuardianApi: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSparkpostCreateRequest" + $ref: "#/components/schemas/SourceTheGuardianApiCreateRequest" tags: - "Sources" responses: @@ -26673,14 +34327,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSparkpost" + operationId: "createSourceTheGuardianApi" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sparkpost#create - /sources/{sourceId}#Sparkpost: + x-speakeasy-entity-operation: Source_TheGuardianApi#create + /sources/{sourceId}#TheGuardianApi: get: tags: - "Sources" @@ -26695,10 +34349,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSparkpost" + operationId: "getSourceTheGuardianApi" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sparkpost#read + x-speakeasy-entity-operation: Source_TheGuardianApi#read put: tags: - "Sources" @@ -26706,7 +34360,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSparkpostPutRequest" + $ref: "#/components/schemas/SourceTheGuardianApiPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26714,10 +34368,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSparkpost" + operationId: "putSourceTheGuardianApi" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sparkpost#update + x-speakeasy-entity-operation: Source_TheGuardianApi#update delete: tags: - "Sources" @@ -26728,10 +34382,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSparkpost" + operationId: "deleteSourceTheGuardianApi" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Sparkpost#delete + x-speakeasy-entity-operation: Source_TheGuardianApi#delete parameters: - name: "sourceId" schema: @@ -26739,13 +34393,13 @@ paths: type: "string" in: "path" required: true - /sources#SplitIo: + /sources#Thinkific: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSplitIoCreateRequest" + $ref: "#/components/schemas/SourceThinkificCreateRequest" tags: - "Sources" responses: @@ -26759,14 +34413,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSplitIo" + operationId: "createSourceThinkific" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SplitIo#create - /sources/{sourceId}#SplitIo: + x-speakeasy-entity-operation: Source_Thinkific#create + /sources/{sourceId}#Thinkific: get: tags: - "Sources" @@ -26781,10 +34435,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSplitIo" + operationId: "getSourceThinkific" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SplitIo#read + x-speakeasy-entity-operation: Source_Thinkific#read put: tags: - "Sources" @@ -26792,7 +34446,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSplitIoPutRequest" + $ref: "#/components/schemas/SourceThinkificPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26800,10 +34454,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSplitIo" + operationId: "putSourceThinkific" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SplitIo#update + x-speakeasy-entity-operation: Source_Thinkific#update delete: tags: - "Sources" @@ -26814,10 +34468,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSplitIo" + operationId: "deleteSourceThinkific" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SplitIo#delete + x-speakeasy-entity-operation: Source_Thinkific#delete parameters: - name: "sourceId" schema: @@ -26825,13 +34479,13 @@ paths: type: "string" in: "path" required: true - /sources#Square: + /sources#Ticketmaster: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSquareCreateRequest" + $ref: "#/components/schemas/SourceTicketmasterCreateRequest" tags: - "Sources" responses: @@ -26845,14 +34499,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSquare" + operationId: "createSourceTicketmaster" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Square#create - /sources/{sourceId}#Square: + x-speakeasy-entity-operation: Source_Ticketmaster#create + /sources/{sourceId}#Ticketmaster: get: tags: - "Sources" @@ -26867,10 +34521,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSquare" + operationId: "getSourceTicketmaster" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Square#read + x-speakeasy-entity-operation: Source_Ticketmaster#read put: tags: - "Sources" @@ -26878,7 +34532,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSquarePutRequest" + $ref: "#/components/schemas/SourceTicketmasterPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26886,10 +34540,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSquare" + operationId: "putSourceTicketmaster" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Square#update + x-speakeasy-entity-operation: Source_Ticketmaster#update delete: tags: - "Sources" @@ -26900,10 +34554,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSquare" + operationId: "deleteSourceTicketmaster" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Square#delete + x-speakeasy-entity-operation: Source_Ticketmaster#delete parameters: - name: "sourceId" schema: @@ -26911,13 +34565,13 @@ paths: type: "string" in: "path" required: true - /sources#Squarespace: + /sources#Tickettailor: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSquarespaceCreateRequest" + $ref: "#/components/schemas/SourceTickettailorCreateRequest" tags: - "Sources" responses: @@ -26931,14 +34585,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSquarespace" + operationId: "createSourceTickettailor" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Squarespace#create - /sources/{sourceId}#Squarespace: + x-speakeasy-entity-operation: Source_Tickettailor#create + /sources/{sourceId}#Tickettailor: get: tags: - "Sources" @@ -26953,10 +34607,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSquarespace" + operationId: "getSourceTickettailor" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Squarespace#read + x-speakeasy-entity-operation: Source_Tickettailor#read put: tags: - "Sources" @@ -26964,7 +34618,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSquarespacePutRequest" + $ref: "#/components/schemas/SourceTickettailorPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -26972,10 +34626,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSquarespace" + operationId: "putSourceTickettailor" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Squarespace#update + x-speakeasy-entity-operation: Source_Tickettailor#update delete: tags: - "Sources" @@ -26986,10 +34640,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSquarespace" + operationId: "deleteSourceTickettailor" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Squarespace#delete + x-speakeasy-entity-operation: Source_Tickettailor#delete parameters: - name: "sourceId" schema: @@ -26997,13 +34651,13 @@ paths: type: "string" in: "path" required: true - /sources#Statsig: + /sources#TiktokMarketing: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceStatsigCreateRequest" + $ref: "#/components/schemas/SourceTiktokMarketingCreateRequest" tags: - "Sources" responses: @@ -27017,14 +34671,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceStatsig" + operationId: "createSourceTiktokMarketing" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Statsig#create - /sources/{sourceId}#Statsig: + x-speakeasy-entity-operation: Source_TiktokMarketing#create + /sources/{sourceId}#TiktokMarketing: get: tags: - "Sources" @@ -27039,10 +34693,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceStatsig" + operationId: "getSourceTiktokMarketing" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Statsig#read + x-speakeasy-entity-operation: Source_TiktokMarketing#read put: tags: - "Sources" @@ -27050,7 +34704,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceStatsigPutRequest" + $ref: "#/components/schemas/SourceTiktokMarketingPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27058,10 +34712,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceStatsig" + operationId: "putSourceTiktokMarketing" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Statsig#update + x-speakeasy-entity-operation: Source_TiktokMarketing#update delete: tags: - "Sources" @@ -27072,10 +34726,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceStatsig" + operationId: "deleteSourceTiktokMarketing" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Statsig#delete + x-speakeasy-entity-operation: Source_TiktokMarketing#delete parameters: - name: "sourceId" schema: @@ -27083,13 +34737,13 @@ paths: type: "string" in: "path" required: true - /sources#Statuspage: + /sources#Timely: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceStatuspageCreateRequest" + $ref: "#/components/schemas/SourceTimelyCreateRequest" tags: - "Sources" responses: @@ -27103,14 +34757,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceStatuspage" + operationId: "createSourceTimely" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Statuspage#create - /sources/{sourceId}#Statuspage: + x-speakeasy-entity-operation: Source_Timely#create + /sources/{sourceId}#Timely: get: tags: - "Sources" @@ -27125,10 +34779,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceStatuspage" + operationId: "getSourceTimely" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Statuspage#read + x-speakeasy-entity-operation: Source_Timely#read put: tags: - "Sources" @@ -27136,7 +34790,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceStatuspagePutRequest" + $ref: "#/components/schemas/SourceTimelyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27144,10 +34798,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceStatuspage" + operationId: "putSourceTimely" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Statuspage#update + x-speakeasy-entity-operation: Source_Timely#update delete: tags: - "Sources" @@ -27158,10 +34812,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceStatuspage" + operationId: "deleteSourceTimely" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Statuspage#delete + x-speakeasy-entity-operation: Source_Timely#delete parameters: - name: "sourceId" schema: @@ -27169,13 +34823,13 @@ paths: type: "string" in: "path" required: true - /sources#Strava: + /sources#Tinyemail: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceStravaCreateRequest" + $ref: "#/components/schemas/SourceTinyemailCreateRequest" tags: - "Sources" responses: @@ -27189,14 +34843,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceStrava" + operationId: "createSourceTinyemail" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Strava#create - /sources/{sourceId}#Strava: + x-speakeasy-entity-operation: Source_Tinyemail#create + /sources/{sourceId}#Tinyemail: get: tags: - "Sources" @@ -27211,10 +34865,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceStrava" + operationId: "getSourceTinyemail" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Strava#read + x-speakeasy-entity-operation: Source_Tinyemail#read put: tags: - "Sources" @@ -27222,7 +34876,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceStravaPutRequest" + $ref: "#/components/schemas/SourceTinyemailPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27230,10 +34884,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceStrava" + operationId: "putSourceTinyemail" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Strava#update + x-speakeasy-entity-operation: Source_Tinyemail#update delete: tags: - "Sources" @@ -27244,10 +34898,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceStrava" + operationId: "deleteSourceTinyemail" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Strava#delete + x-speakeasy-entity-operation: Source_Tinyemail#delete parameters: - name: "sourceId" schema: @@ -27255,13 +34909,13 @@ paths: type: "string" in: "path" required: true - /sources#Stripe: + /sources#Todoist: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceStripeCreateRequest" + $ref: "#/components/schemas/SourceTodoistCreateRequest" tags: - "Sources" responses: @@ -27275,14 +34929,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceStripe" + operationId: "createSourceTodoist" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Stripe#create - /sources/{sourceId}#Stripe: + x-speakeasy-entity-operation: Source_Todoist#create + /sources/{sourceId}#Todoist: get: tags: - "Sources" @@ -27297,10 +34951,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceStripe" + operationId: "getSourceTodoist" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Stripe#read + x-speakeasy-entity-operation: Source_Todoist#read put: tags: - "Sources" @@ -27308,7 +34962,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceStripePutRequest" + $ref: "#/components/schemas/SourceTodoistPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27316,10 +34970,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceStripe" + operationId: "putSourceTodoist" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Stripe#update + x-speakeasy-entity-operation: Source_Todoist#update delete: tags: - "Sources" @@ -27330,10 +34984,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceStripe" + operationId: "deleteSourceTodoist" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Stripe#delete + x-speakeasy-entity-operation: Source_Todoist#delete parameters: - name: "sourceId" schema: @@ -27341,13 +34995,13 @@ paths: type: "string" in: "path" required: true - /sources#SurveySparrow: + /sources#TrackPms: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSurveySparrowCreateRequest" + $ref: "#/components/schemas/SourceTrackPmsCreateRequest" tags: - "Sources" responses: @@ -27361,14 +35015,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSurveySparrow" + operationId: "createSourceTrackPms" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SurveySparrow#create - /sources/{sourceId}#SurveySparrow: + x-speakeasy-entity-operation: Source_TrackPms#create + /sources/{sourceId}#TrackPms: get: tags: - "Sources" @@ -27383,10 +35037,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSurveySparrow" + operationId: "getSourceTrackPms" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SurveySparrow#read + x-speakeasy-entity-operation: Source_TrackPms#read put: tags: - "Sources" @@ -27394,7 +35048,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSurveySparrowPutRequest" + $ref: "#/components/schemas/SourceTrackPmsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27402,10 +35056,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSurveySparrow" + operationId: "putSourceTrackPms" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SurveySparrow#update + x-speakeasy-entity-operation: Source_TrackPms#update delete: tags: - "Sources" @@ -27416,10 +35070,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSurveySparrow" + operationId: "deleteSourceTrackPms" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_SurveySparrow#delete + x-speakeasy-entity-operation: Source_TrackPms#delete parameters: - name: "sourceId" schema: @@ -27427,13 +35081,13 @@ paths: type: "string" in: "path" required: true - /sources#Surveymonkey: + /sources#Trello: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSurveymonkeyCreateRequest" + $ref: "#/components/schemas/SourceTrelloCreateRequest" tags: - "Sources" responses: @@ -27447,14 +35101,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSurveymonkey" + operationId: "createSourceTrello" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Surveymonkey#create - /sources/{sourceId}#Surveymonkey: + x-speakeasy-entity-operation: Source_Trello#create + /sources/{sourceId}#Trello: get: tags: - "Sources" @@ -27469,10 +35123,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSurveymonkey" + operationId: "getSourceTrello" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Surveymonkey#read + x-speakeasy-entity-operation: Source_Trello#read put: tags: - "Sources" @@ -27480,7 +35134,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSurveymonkeyPutRequest" + $ref: "#/components/schemas/SourceTrelloPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27488,10 +35142,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSurveymonkey" + operationId: "putSourceTrello" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Surveymonkey#update + x-speakeasy-entity-operation: Source_Trello#update delete: tags: - "Sources" @@ -27502,10 +35156,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSurveymonkey" + operationId: "deleteSourceTrello" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Surveymonkey#delete + x-speakeasy-entity-operation: Source_Trello#delete parameters: - name: "sourceId" schema: @@ -27513,13 +35167,13 @@ paths: type: "string" in: "path" required: true - /sources#Survicate: + /sources#Tremendous: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceSurvicateCreateRequest" + $ref: "#/components/schemas/SourceTremendousCreateRequest" tags: - "Sources" responses: @@ -27533,14 +35187,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceSurvicate" + operationId: "createSourceTremendous" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Survicate#create - /sources/{sourceId}#Survicate: + x-speakeasy-entity-operation: Source_Tremendous#create + /sources/{sourceId}#Tremendous: get: tags: - "Sources" @@ -27555,10 +35209,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceSurvicate" + operationId: "getSourceTremendous" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Survicate#read + x-speakeasy-entity-operation: Source_Tremendous#read put: tags: - "Sources" @@ -27566,7 +35220,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceSurvicatePutRequest" + $ref: "#/components/schemas/SourceTremendousPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27574,10 +35228,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceSurvicate" + operationId: "putSourceTremendous" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Survicate#update + x-speakeasy-entity-operation: Source_Tremendous#update delete: tags: - "Sources" @@ -27588,10 +35242,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceSurvicate" + operationId: "deleteSourceTremendous" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Survicate#delete + x-speakeasy-entity-operation: Source_Tremendous#delete parameters: - name: "sourceId" schema: @@ -27599,13 +35253,13 @@ paths: type: "string" in: "path" required: true - /sources#Teamtailor: + /sources#Trustpilot: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTeamtailorCreateRequest" + $ref: "#/components/schemas/SourceTrustpilotCreateRequest" tags: - "Sources" responses: @@ -27619,14 +35273,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTeamtailor" + operationId: "createSourceTrustpilot" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Teamtailor#create - /sources/{sourceId}#Teamtailor: + x-speakeasy-entity-operation: Source_Trustpilot#create + /sources/{sourceId}#Trustpilot: get: tags: - "Sources" @@ -27641,10 +35295,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTeamtailor" + operationId: "getSourceTrustpilot" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Teamtailor#read + x-speakeasy-entity-operation: Source_Trustpilot#read put: tags: - "Sources" @@ -27652,7 +35306,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTeamtailorPutRequest" + $ref: "#/components/schemas/SourceTrustpilotPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27660,10 +35314,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTeamtailor" + operationId: "putSourceTrustpilot" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Teamtailor#update + x-speakeasy-entity-operation: Source_Trustpilot#update delete: tags: - "Sources" @@ -27674,10 +35328,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTeamtailor" + operationId: "deleteSourceTrustpilot" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Teamtailor#delete + x-speakeasy-entity-operation: Source_Trustpilot#delete parameters: - name: "sourceId" schema: @@ -27685,13 +35339,13 @@ paths: type: "string" in: "path" required: true - /sources#Teamwork: + /sources#TvmazeSchedule: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTeamworkCreateRequest" + $ref: "#/components/schemas/SourceTvmazeScheduleCreateRequest" tags: - "Sources" responses: @@ -27705,14 +35359,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTeamwork" + operationId: "createSourceTvmazeSchedule" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Teamwork#create - /sources/{sourceId}#Teamwork: + x-speakeasy-entity-operation: Source_TvmazeSchedule#create + /sources/{sourceId}#TvmazeSchedule: get: tags: - "Sources" @@ -27727,10 +35381,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTeamwork" + operationId: "getSourceTvmazeSchedule" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Teamwork#read + x-speakeasy-entity-operation: Source_TvmazeSchedule#read put: tags: - "Sources" @@ -27738,7 +35392,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTeamworkPutRequest" + $ref: "#/components/schemas/SourceTvmazeSchedulePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27746,10 +35400,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTeamwork" + operationId: "putSourceTvmazeSchedule" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Teamwork#update + x-speakeasy-entity-operation: Source_TvmazeSchedule#update delete: tags: - "Sources" @@ -27760,10 +35414,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTeamwork" + operationId: "deleteSourceTvmazeSchedule" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Teamwork#delete + x-speakeasy-entity-operation: Source_TvmazeSchedule#delete parameters: - name: "sourceId" schema: @@ -27771,13 +35425,13 @@ paths: type: "string" in: "path" required: true - /sources#Tempo: + /sources#TwelveData: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTempoCreateRequest" + $ref: "#/components/schemas/SourceTwelveDataCreateRequest" tags: - "Sources" responses: @@ -27791,14 +35445,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTempo" + operationId: "createSourceTwelveData" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Tempo#create - /sources/{sourceId}#Tempo: + x-speakeasy-entity-operation: Source_TwelveData#create + /sources/{sourceId}#TwelveData: get: tags: - "Sources" @@ -27813,10 +35467,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTempo" + operationId: "getSourceTwelveData" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Tempo#read + x-speakeasy-entity-operation: Source_TwelveData#read put: tags: - "Sources" @@ -27824,7 +35478,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTempoPutRequest" + $ref: "#/components/schemas/SourceTwelveDataPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27832,10 +35486,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTempo" + operationId: "putSourceTwelveData" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Tempo#update + x-speakeasy-entity-operation: Source_TwelveData#update delete: tags: - "Sources" @@ -27846,10 +35500,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTempo" + operationId: "deleteSourceTwelveData" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Tempo#delete + x-speakeasy-entity-operation: Source_TwelveData#delete parameters: - name: "sourceId" schema: @@ -27857,13 +35511,13 @@ paths: type: "string" in: "path" required: true - /sources#Testrail: + /sources#Twilio: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTestrailCreateRequest" + $ref: "#/components/schemas/SourceTwilioCreateRequest" tags: - "Sources" responses: @@ -27877,14 +35531,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTestrail" + operationId: "createSourceTwilio" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Testrail#create - /sources/{sourceId}#Testrail: + x-speakeasy-entity-operation: Source_Twilio#create + /sources/{sourceId}#Twilio: get: tags: - "Sources" @@ -27899,10 +35553,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTestrail" + operationId: "getSourceTwilio" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Testrail#read + x-speakeasy-entity-operation: Source_Twilio#read put: tags: - "Sources" @@ -27910,7 +35564,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTestrailPutRequest" + $ref: "#/components/schemas/SourceTwilioPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -27918,10 +35572,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTestrail" + operationId: "putSourceTwilio" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Testrail#update + x-speakeasy-entity-operation: Source_Twilio#update delete: tags: - "Sources" @@ -27932,10 +35586,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTestrail" + operationId: "deleteSourceTwilio" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Testrail#delete + x-speakeasy-entity-operation: Source_Twilio#delete parameters: - name: "sourceId" schema: @@ -27943,13 +35597,13 @@ paths: type: "string" in: "path" required: true - /sources#TheGuardianApi: + /sources#TwilioTaskrouter: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTheGuardianApiCreateRequest" + $ref: "#/components/schemas/SourceTwilioTaskrouterCreateRequest" tags: - "Sources" responses: @@ -27963,14 +35617,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTheGuardianApi" + operationId: "createSourceTwilioTaskrouter" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TheGuardianApi#create - /sources/{sourceId}#TheGuardianApi: + x-speakeasy-entity-operation: Source_TwilioTaskrouter#create + /sources/{sourceId}#TwilioTaskrouter: get: tags: - "Sources" @@ -27985,10 +35639,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTheGuardianApi" + operationId: "getSourceTwilioTaskrouter" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TheGuardianApi#read + x-speakeasy-entity-operation: Source_TwilioTaskrouter#read put: tags: - "Sources" @@ -27996,7 +35650,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTheGuardianApiPutRequest" + $ref: "#/components/schemas/SourceTwilioTaskrouterPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28004,10 +35658,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTheGuardianApi" + operationId: "putSourceTwilioTaskrouter" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TheGuardianApi#update + x-speakeasy-entity-operation: Source_TwilioTaskrouter#update delete: tags: - "Sources" @@ -28018,10 +35672,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTheGuardianApi" + operationId: "deleteSourceTwilioTaskrouter" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TheGuardianApi#delete + x-speakeasy-entity-operation: Source_TwilioTaskrouter#delete parameters: - name: "sourceId" schema: @@ -28029,13 +35683,13 @@ paths: type: "string" in: "path" required: true - /sources#Thinkific: + /sources#Twitter: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceThinkificCreateRequest" + $ref: "#/components/schemas/SourceTwitterCreateRequest" tags: - "Sources" responses: @@ -28049,14 +35703,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceThinkific" + operationId: "createSourceTwitter" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Thinkific#create - /sources/{sourceId}#Thinkific: + x-speakeasy-entity-operation: Source_Twitter#create + /sources/{sourceId}#Twitter: get: tags: - "Sources" @@ -28071,10 +35725,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceThinkific" + operationId: "getSourceTwitter" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Thinkific#read + x-speakeasy-entity-operation: Source_Twitter#read put: tags: - "Sources" @@ -28082,7 +35736,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceThinkificPutRequest" + $ref: "#/components/schemas/SourceTwitterPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28090,10 +35744,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceThinkific" + operationId: "putSourceTwitter" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Thinkific#update + x-speakeasy-entity-operation: Source_Twitter#update delete: tags: - "Sources" @@ -28104,10 +35758,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceThinkific" + operationId: "deleteSourceTwitter" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Thinkific#delete + x-speakeasy-entity-operation: Source_Twitter#delete parameters: - name: "sourceId" schema: @@ -28115,13 +35769,13 @@ paths: type: "string" in: "path" required: true - /sources#Ticketmaster: + /sources#Typeform: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTicketmasterCreateRequest" + $ref: "#/components/schemas/SourceTypeformCreateRequest" tags: - "Sources" responses: @@ -28135,14 +35789,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTicketmaster" + operationId: "createSourceTypeform" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Ticketmaster#create - /sources/{sourceId}#Ticketmaster: + x-speakeasy-entity-operation: Source_Typeform#create + /sources/{sourceId}#Typeform: get: tags: - "Sources" @@ -28157,10 +35811,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTicketmaster" + operationId: "getSourceTypeform" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Ticketmaster#read + x-speakeasy-entity-operation: Source_Typeform#read put: tags: - "Sources" @@ -28168,7 +35822,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTicketmasterPutRequest" + $ref: "#/components/schemas/SourceTypeformPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28176,10 +35830,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTicketmaster" + operationId: "putSourceTypeform" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Ticketmaster#update + x-speakeasy-entity-operation: Source_Typeform#update delete: tags: - "Sources" @@ -28190,10 +35844,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTicketmaster" + operationId: "deleteSourceTypeform" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Ticketmaster#delete + x-speakeasy-entity-operation: Source_Typeform#delete parameters: - name: "sourceId" schema: @@ -28201,13 +35855,13 @@ paths: type: "string" in: "path" required: true - /sources#TiktokMarketing: + /sources#Ubidots: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTiktokMarketingCreateRequest" + $ref: "#/components/schemas/SourceUbidotsCreateRequest" tags: - "Sources" responses: @@ -28221,14 +35875,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTiktokMarketing" + operationId: "createSourceUbidots" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TiktokMarketing#create - /sources/{sourceId}#TiktokMarketing: + x-speakeasy-entity-operation: Source_Ubidots#create + /sources/{sourceId}#Ubidots: get: tags: - "Sources" @@ -28243,10 +35897,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTiktokMarketing" + operationId: "getSourceUbidots" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TiktokMarketing#read + x-speakeasy-entity-operation: Source_Ubidots#read put: tags: - "Sources" @@ -28254,7 +35908,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTiktokMarketingPutRequest" + $ref: "#/components/schemas/SourceUbidotsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28262,10 +35916,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTiktokMarketing" + operationId: "putSourceUbidots" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TiktokMarketing#update + x-speakeasy-entity-operation: Source_Ubidots#update delete: tags: - "Sources" @@ -28276,10 +35930,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTiktokMarketing" + operationId: "deleteSourceUbidots" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TiktokMarketing#delete + x-speakeasy-entity-operation: Source_Ubidots#delete parameters: - name: "sourceId" schema: @@ -28287,13 +35941,13 @@ paths: type: "string" in: "path" required: true - /sources#Timely: + /sources#Unleash: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTimelyCreateRequest" + $ref: "#/components/schemas/SourceUnleashCreateRequest" tags: - "Sources" responses: @@ -28307,14 +35961,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTimely" + operationId: "createSourceUnleash" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Timely#create - /sources/{sourceId}#Timely: + x-speakeasy-entity-operation: Source_Unleash#create + /sources/{sourceId}#Unleash: get: tags: - "Sources" @@ -28329,10 +35983,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTimely" + operationId: "getSourceUnleash" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Timely#read + x-speakeasy-entity-operation: Source_Unleash#read put: tags: - "Sources" @@ -28340,7 +35994,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTimelyPutRequest" + $ref: "#/components/schemas/SourceUnleashPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28348,10 +36002,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTimely" + operationId: "putSourceUnleash" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Timely#update + x-speakeasy-entity-operation: Source_Unleash#update delete: tags: - "Sources" @@ -28362,10 +36016,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTimely" + operationId: "deleteSourceUnleash" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Timely#delete + x-speakeasy-entity-operation: Source_Unleash#delete parameters: - name: "sourceId" schema: @@ -28373,13 +36027,13 @@ paths: type: "string" in: "path" required: true - /sources#Todoist: + /sources#Uppromote: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTodoistCreateRequest" + $ref: "#/components/schemas/SourceUppromoteCreateRequest" tags: - "Sources" responses: @@ -28393,14 +36047,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTodoist" + operationId: "createSourceUppromote" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Todoist#create - /sources/{sourceId}#Todoist: + x-speakeasy-entity-operation: Source_Uppromote#create + /sources/{sourceId}#Uppromote: get: tags: - "Sources" @@ -28415,10 +36069,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTodoist" + operationId: "getSourceUppromote" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Todoist#read + x-speakeasy-entity-operation: Source_Uppromote#read put: tags: - "Sources" @@ -28426,7 +36080,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTodoistPutRequest" + $ref: "#/components/schemas/SourceUppromotePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28434,10 +36088,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTodoist" + operationId: "putSourceUppromote" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Todoist#update + x-speakeasy-entity-operation: Source_Uppromote#update delete: tags: - "Sources" @@ -28448,10 +36102,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTodoist" + operationId: "deleteSourceUppromote" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Todoist#delete + x-speakeasy-entity-operation: Source_Uppromote#delete parameters: - name: "sourceId" schema: @@ -28459,13 +36113,13 @@ paths: type: "string" in: "path" required: true - /sources#Trello: + /sources#UsCensus: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTrelloCreateRequest" + $ref: "#/components/schemas/SourceUsCensusCreateRequest" tags: - "Sources" responses: @@ -28479,14 +36133,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTrello" + operationId: "createSourceUsCensus" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Trello#create - /sources/{sourceId}#Trello: + x-speakeasy-entity-operation: Source_UsCensus#create + /sources/{sourceId}#UsCensus: get: tags: - "Sources" @@ -28501,10 +36155,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTrello" + operationId: "getSourceUsCensus" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Trello#read + x-speakeasy-entity-operation: Source_UsCensus#read put: tags: - "Sources" @@ -28512,7 +36166,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTrelloPutRequest" + $ref: "#/components/schemas/SourceUsCensusPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28520,10 +36174,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTrello" + operationId: "putSourceUsCensus" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Trello#update + x-speakeasy-entity-operation: Source_UsCensus#update delete: tags: - "Sources" @@ -28534,10 +36188,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTrello" + operationId: "deleteSourceUsCensus" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Trello#delete + x-speakeasy-entity-operation: Source_UsCensus#delete parameters: - name: "sourceId" schema: @@ -28545,13 +36199,13 @@ paths: type: "string" in: "path" required: true - /sources#Trustpilot: + /sources#Uservoice: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTrustpilotCreateRequest" + $ref: "#/components/schemas/SourceUservoiceCreateRequest" tags: - "Sources" responses: @@ -28565,14 +36219,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTrustpilot" + operationId: "createSourceUservoice" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Trustpilot#create - /sources/{sourceId}#Trustpilot: + x-speakeasy-entity-operation: Source_Uservoice#create + /sources/{sourceId}#Uservoice: get: tags: - "Sources" @@ -28587,10 +36241,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTrustpilot" + operationId: "getSourceUservoice" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Trustpilot#read + x-speakeasy-entity-operation: Source_Uservoice#read put: tags: - "Sources" @@ -28598,7 +36252,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTrustpilotPutRequest" + $ref: "#/components/schemas/SourceUservoicePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28606,10 +36260,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTrustpilot" + operationId: "putSourceUservoice" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Trustpilot#update + x-speakeasy-entity-operation: Source_Uservoice#update delete: tags: - "Sources" @@ -28620,10 +36274,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTrustpilot" + operationId: "deleteSourceUservoice" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Trustpilot#delete + x-speakeasy-entity-operation: Source_Uservoice#delete parameters: - name: "sourceId" schema: @@ -28631,13 +36285,13 @@ paths: type: "string" in: "path" required: true - /sources#TvmazeSchedule: + /sources#Vantage: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTvmazeScheduleCreateRequest" + $ref: "#/components/schemas/SourceVantageCreateRequest" tags: - "Sources" responses: @@ -28651,14 +36305,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTvmazeSchedule" + operationId: "createSourceVantage" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TvmazeSchedule#create - /sources/{sourceId}#TvmazeSchedule: + x-speakeasy-entity-operation: Source_Vantage#create + /sources/{sourceId}#Vantage: get: tags: - "Sources" @@ -28673,10 +36327,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTvmazeSchedule" + operationId: "getSourceVantage" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TvmazeSchedule#read + x-speakeasy-entity-operation: Source_Vantage#read put: tags: - "Sources" @@ -28684,7 +36338,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTvmazeSchedulePutRequest" + $ref: "#/components/schemas/SourceVantagePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28692,10 +36346,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTvmazeSchedule" + operationId: "putSourceVantage" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TvmazeSchedule#update + x-speakeasy-entity-operation: Source_Vantage#update delete: tags: - "Sources" @@ -28706,10 +36360,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTvmazeSchedule" + operationId: "deleteSourceVantage" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TvmazeSchedule#delete + x-speakeasy-entity-operation: Source_Vantage#delete parameters: - name: "sourceId" schema: @@ -28717,13 +36371,13 @@ paths: type: "string" in: "path" required: true - /sources#TwelveData: + /sources#Veeqo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTwelveDataCreateRequest" + $ref: "#/components/schemas/SourceVeeqoCreateRequest" tags: - "Sources" responses: @@ -28737,14 +36391,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTwelveData" + operationId: "createSourceVeeqo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TwelveData#create - /sources/{sourceId}#TwelveData: + x-speakeasy-entity-operation: Source_Veeqo#create + /sources/{sourceId}#Veeqo: get: tags: - "Sources" @@ -28759,10 +36413,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTwelveData" + operationId: "getSourceVeeqo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TwelveData#read + x-speakeasy-entity-operation: Source_Veeqo#read put: tags: - "Sources" @@ -28770,7 +36424,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTwelveDataPutRequest" + $ref: "#/components/schemas/SourceVeeqoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28778,10 +36432,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTwelveData" + operationId: "putSourceVeeqo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TwelveData#update + x-speakeasy-entity-operation: Source_Veeqo#update delete: tags: - "Sources" @@ -28792,10 +36446,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTwelveData" + operationId: "deleteSourceVeeqo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TwelveData#delete + x-speakeasy-entity-operation: Source_Veeqo#delete parameters: - name: "sourceId" schema: @@ -28803,13 +36457,13 @@ paths: type: "string" in: "path" required: true - /sources#Twilio: + /sources#Vercel: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTwilioCreateRequest" + $ref: "#/components/schemas/SourceVercelCreateRequest" tags: - "Sources" responses: @@ -28823,14 +36477,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTwilio" + operationId: "createSourceVercel" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Twilio#create - /sources/{sourceId}#Twilio: + x-speakeasy-entity-operation: Source_Vercel#create + /sources/{sourceId}#Vercel: get: tags: - "Sources" @@ -28845,10 +36499,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTwilio" + operationId: "getSourceVercel" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Twilio#read + x-speakeasy-entity-operation: Source_Vercel#read put: tags: - "Sources" @@ -28856,7 +36510,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTwilioPutRequest" + $ref: "#/components/schemas/SourceVercelPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28864,10 +36518,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTwilio" + operationId: "putSourceVercel" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Twilio#update + x-speakeasy-entity-operation: Source_Vercel#update delete: tags: - "Sources" @@ -28878,10 +36532,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTwilio" + operationId: "deleteSourceVercel" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Twilio#delete + x-speakeasy-entity-operation: Source_Vercel#delete parameters: - name: "sourceId" schema: @@ -28889,13 +36543,13 @@ paths: type: "string" in: "path" required: true - /sources#TwilioTaskrouter: + /sources#VismaEconomic: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTwilioTaskrouterCreateRequest" + $ref: "#/components/schemas/SourceVismaEconomicCreateRequest" tags: - "Sources" responses: @@ -28909,14 +36563,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTwilioTaskrouter" + operationId: "createSourceVismaEconomic" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TwilioTaskrouter#create - /sources/{sourceId}#TwilioTaskrouter: + x-speakeasy-entity-operation: Source_VismaEconomic#create + /sources/{sourceId}#VismaEconomic: get: tags: - "Sources" @@ -28931,10 +36585,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTwilioTaskrouter" + operationId: "getSourceVismaEconomic" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TwilioTaskrouter#read + x-speakeasy-entity-operation: Source_VismaEconomic#read put: tags: - "Sources" @@ -28942,7 +36596,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTwilioTaskrouterPutRequest" + $ref: "#/components/schemas/SourceVismaEconomicPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -28950,10 +36604,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTwilioTaskrouter" + operationId: "putSourceVismaEconomic" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TwilioTaskrouter#update + x-speakeasy-entity-operation: Source_VismaEconomic#update delete: tags: - "Sources" @@ -28964,10 +36618,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTwilioTaskrouter" + operationId: "deleteSourceVismaEconomic" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_TwilioTaskrouter#delete + x-speakeasy-entity-operation: Source_VismaEconomic#delete parameters: - name: "sourceId" schema: @@ -28975,13 +36629,13 @@ paths: type: "string" in: "path" required: true - /sources#Twitter: + /sources#Vitally: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTwitterCreateRequest" + $ref: "#/components/schemas/SourceVitallyCreateRequest" tags: - "Sources" responses: @@ -28995,14 +36649,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTwitter" + operationId: "createSourceVitally" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Twitter#create - /sources/{sourceId}#Twitter: + x-speakeasy-entity-operation: Source_Vitally#create + /sources/{sourceId}#Vitally: get: tags: - "Sources" @@ -29017,10 +36671,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTwitter" + operationId: "getSourceVitally" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Twitter#read + x-speakeasy-entity-operation: Source_Vitally#read put: tags: - "Sources" @@ -29028,7 +36682,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTwitterPutRequest" + $ref: "#/components/schemas/SourceVitallyPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29036,10 +36690,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTwitter" + operationId: "putSourceVitally" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Twitter#update + x-speakeasy-entity-operation: Source_Vitally#update delete: tags: - "Sources" @@ -29050,10 +36704,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTwitter" + operationId: "deleteSourceVitally" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Twitter#delete + x-speakeasy-entity-operation: Source_Vitally#delete parameters: - name: "sourceId" schema: @@ -29061,13 +36715,13 @@ paths: type: "string" in: "path" required: true - /sources#Typeform: + /sources#Vwo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceTypeformCreateRequest" + $ref: "#/components/schemas/SourceVwoCreateRequest" tags: - "Sources" responses: @@ -29081,14 +36735,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceTypeform" + operationId: "createSourceVwo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Typeform#create - /sources/{sourceId}#Typeform: + x-speakeasy-entity-operation: Source_Vwo#create + /sources/{sourceId}#Vwo: get: tags: - "Sources" @@ -29103,10 +36757,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceTypeform" + operationId: "getSourceVwo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Typeform#read + x-speakeasy-entity-operation: Source_Vwo#read put: tags: - "Sources" @@ -29114,7 +36768,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceTypeformPutRequest" + $ref: "#/components/schemas/SourceVwoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29122,10 +36776,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceTypeform" + operationId: "putSourceVwo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Typeform#update + x-speakeasy-entity-operation: Source_Vwo#update delete: tags: - "Sources" @@ -29136,10 +36790,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceTypeform" + operationId: "deleteSourceVwo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Typeform#delete + x-speakeasy-entity-operation: Source_Vwo#delete parameters: - name: "sourceId" schema: @@ -29147,13 +36801,13 @@ paths: type: "string" in: "path" required: true - /sources#Unleash: + /sources#Waiteraid: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceUnleashCreateRequest" + $ref: "#/components/schemas/SourceWaiteraidCreateRequest" tags: - "Sources" responses: @@ -29167,14 +36821,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceUnleash" + operationId: "createSourceWaiteraid" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Unleash#create - /sources/{sourceId}#Unleash: + x-speakeasy-entity-operation: Source_Waiteraid#create + /sources/{sourceId}#Waiteraid: get: tags: - "Sources" @@ -29189,10 +36843,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceUnleash" + operationId: "getSourceWaiteraid" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Unleash#read + x-speakeasy-entity-operation: Source_Waiteraid#read put: tags: - "Sources" @@ -29200,7 +36854,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceUnleashPutRequest" + $ref: "#/components/schemas/SourceWaiteraidPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29208,10 +36862,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceUnleash" + operationId: "putSourceWaiteraid" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Unleash#update + x-speakeasy-entity-operation: Source_Waiteraid#update delete: tags: - "Sources" @@ -29222,10 +36876,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceUnleash" + operationId: "deleteSourceWaiteraid" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Unleash#delete + x-speakeasy-entity-operation: Source_Waiteraid#delete parameters: - name: "sourceId" schema: @@ -29233,13 +36887,13 @@ paths: type: "string" in: "path" required: true - /sources#Uppromote: + /sources#WasabiStatsApi: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceUppromoteCreateRequest" + $ref: "#/components/schemas/SourceWasabiStatsApiCreateRequest" tags: - "Sources" responses: @@ -29253,14 +36907,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceUppromote" + operationId: "createSourceWasabiStatsApi" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Uppromote#create - /sources/{sourceId}#Uppromote: + x-speakeasy-entity-operation: Source_WasabiStatsApi#create + /sources/{sourceId}#WasabiStatsApi: get: tags: - "Sources" @@ -29275,10 +36929,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceUppromote" + operationId: "getSourceWasabiStatsApi" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Uppromote#read + x-speakeasy-entity-operation: Source_WasabiStatsApi#read put: tags: - "Sources" @@ -29286,7 +36940,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceUppromotePutRequest" + $ref: "#/components/schemas/SourceWasabiStatsApiPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29294,10 +36948,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceUppromote" + operationId: "putSourceWasabiStatsApi" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Uppromote#update + x-speakeasy-entity-operation: Source_WasabiStatsApi#update delete: tags: - "Sources" @@ -29308,10 +36962,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceUppromote" + operationId: "deleteSourceWasabiStatsApi" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Uppromote#delete + x-speakeasy-entity-operation: Source_WasabiStatsApi#delete parameters: - name: "sourceId" schema: @@ -29319,13 +36973,13 @@ paths: type: "string" in: "path" required: true - /sources#UsCensus: + /sources#Weatherstack: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceUsCensusCreateRequest" + $ref: "#/components/schemas/SourceWeatherstackCreateRequest" tags: - "Sources" responses: @@ -29339,14 +36993,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceUsCensus" + operationId: "createSourceWeatherstack" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_UsCensus#create - /sources/{sourceId}#UsCensus: + x-speakeasy-entity-operation: Source_Weatherstack#create + /sources/{sourceId}#Weatherstack: get: tags: - "Sources" @@ -29361,10 +37015,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceUsCensus" + operationId: "getSourceWeatherstack" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_UsCensus#read + x-speakeasy-entity-operation: Source_Weatherstack#read put: tags: - "Sources" @@ -29372,7 +37026,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceUsCensusPutRequest" + $ref: "#/components/schemas/SourceWeatherstackPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29380,10 +37034,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceUsCensus" + operationId: "putSourceWeatherstack" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_UsCensus#update + x-speakeasy-entity-operation: Source_Weatherstack#update delete: tags: - "Sources" @@ -29394,10 +37048,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceUsCensus" + operationId: "deleteSourceWeatherstack" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_UsCensus#delete + x-speakeasy-entity-operation: Source_Weatherstack#delete parameters: - name: "sourceId" schema: @@ -29405,13 +37059,13 @@ paths: type: "string" in: "path" required: true - /sources#Uservoice: + /sources#WebScrapper: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceUservoiceCreateRequest" + $ref: "#/components/schemas/SourceWebScrapperCreateRequest" tags: - "Sources" responses: @@ -29425,14 +37079,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceUservoice" + operationId: "createSourceWebScrapper" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Uservoice#create - /sources/{sourceId}#Uservoice: + x-speakeasy-entity-operation: Source_WebScrapper#create + /sources/{sourceId}#WebScrapper: get: tags: - "Sources" @@ -29447,10 +37101,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceUservoice" + operationId: "getSourceWebScrapper" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Uservoice#read + x-speakeasy-entity-operation: Source_WebScrapper#read put: tags: - "Sources" @@ -29458,7 +37112,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceUservoicePutRequest" + $ref: "#/components/schemas/SourceWebScrapperPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29466,10 +37120,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceUservoice" + operationId: "putSourceWebScrapper" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Uservoice#update + x-speakeasy-entity-operation: Source_WebScrapper#update delete: tags: - "Sources" @@ -29480,10 +37134,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceUservoice" + operationId: "deleteSourceWebScrapper" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Uservoice#delete + x-speakeasy-entity-operation: Source_WebScrapper#delete parameters: - name: "sourceId" schema: @@ -29491,13 +37145,13 @@ paths: type: "string" in: "path" required: true - /sources#Vantage: + /sources#Webflow: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceVantageCreateRequest" + $ref: "#/components/schemas/SourceWebflowCreateRequest" tags: - "Sources" responses: @@ -29511,14 +37165,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceVantage" + operationId: "createSourceWebflow" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vantage#create - /sources/{sourceId}#Vantage: + x-speakeasy-entity-operation: Source_Webflow#create + /sources/{sourceId}#Webflow: get: tags: - "Sources" @@ -29533,10 +37187,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceVantage" + operationId: "getSourceWebflow" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vantage#read + x-speakeasy-entity-operation: Source_Webflow#read put: tags: - "Sources" @@ -29544,7 +37198,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceVantagePutRequest" + $ref: "#/components/schemas/SourceWebflowPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29552,10 +37206,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceVantage" + operationId: "putSourceWebflow" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vantage#update + x-speakeasy-entity-operation: Source_Webflow#update delete: tags: - "Sources" @@ -29566,10 +37220,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceVantage" + operationId: "deleteSourceWebflow" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vantage#delete + x-speakeasy-entity-operation: Source_Webflow#delete parameters: - name: "sourceId" schema: @@ -29577,13 +37231,13 @@ paths: type: "string" in: "path" required: true - /sources#Veeqo: + /sources#WhenIWork: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceVeeqoCreateRequest" + $ref: "#/components/schemas/SourceWhenIWorkCreateRequest" tags: - "Sources" responses: @@ -29597,14 +37251,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceVeeqo" + operationId: "createSourceWhenIWork" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Veeqo#create - /sources/{sourceId}#Veeqo: + x-speakeasy-entity-operation: Source_WhenIWork#create + /sources/{sourceId}#WhenIWork: get: tags: - "Sources" @@ -29619,10 +37273,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceVeeqo" + operationId: "getSourceWhenIWork" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Veeqo#read + x-speakeasy-entity-operation: Source_WhenIWork#read put: tags: - "Sources" @@ -29630,7 +37284,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceVeeqoPutRequest" + $ref: "#/components/schemas/SourceWhenIWorkPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29638,10 +37292,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceVeeqo" + operationId: "putSourceWhenIWork" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Veeqo#update + x-speakeasy-entity-operation: Source_WhenIWork#update delete: tags: - "Sources" @@ -29652,10 +37306,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceVeeqo" + operationId: "deleteSourceWhenIWork" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Veeqo#delete + x-speakeasy-entity-operation: Source_WhenIWork#delete parameters: - name: "sourceId" schema: @@ -29663,13 +37317,13 @@ paths: type: "string" in: "path" required: true - /sources#VismaEconomic: + /sources#WhiskyHunter: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceVismaEconomicCreateRequest" + $ref: "#/components/schemas/SourceWhiskyHunterCreateRequest" tags: - "Sources" responses: @@ -29683,14 +37337,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceVismaEconomic" + operationId: "createSourceWhiskyHunter" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_VismaEconomic#create - /sources/{sourceId}#VismaEconomic: + x-speakeasy-entity-operation: Source_WhiskyHunter#create + /sources/{sourceId}#WhiskyHunter: get: tags: - "Sources" @@ -29705,10 +37359,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceVismaEconomic" + operationId: "getSourceWhiskyHunter" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_VismaEconomic#read + x-speakeasy-entity-operation: Source_WhiskyHunter#read put: tags: - "Sources" @@ -29716,7 +37370,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceVismaEconomicPutRequest" + $ref: "#/components/schemas/SourceWhiskyHunterPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29724,10 +37378,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceVismaEconomic" + operationId: "putSourceWhiskyHunter" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_VismaEconomic#update + x-speakeasy-entity-operation: Source_WhiskyHunter#update delete: tags: - "Sources" @@ -29738,10 +37392,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceVismaEconomic" + operationId: "deleteSourceWhiskyHunter" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_VismaEconomic#delete + x-speakeasy-entity-operation: Source_WhiskyHunter#delete parameters: - name: "sourceId" schema: @@ -29749,13 +37403,13 @@ paths: type: "string" in: "path" required: true - /sources#Vitally: + /sources#WikipediaPageviews: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceVitallyCreateRequest" + $ref: "#/components/schemas/SourceWikipediaPageviewsCreateRequest" tags: - "Sources" responses: @@ -29769,14 +37423,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceVitally" + operationId: "createSourceWikipediaPageviews" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vitally#create - /sources/{sourceId}#Vitally: + x-speakeasy-entity-operation: Source_WikipediaPageviews#create + /sources/{sourceId}#WikipediaPageviews: get: tags: - "Sources" @@ -29791,10 +37445,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceVitally" + operationId: "getSourceWikipediaPageviews" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vitally#read + x-speakeasy-entity-operation: Source_WikipediaPageviews#read put: tags: - "Sources" @@ -29802,7 +37456,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceVitallyPutRequest" + $ref: "#/components/schemas/SourceWikipediaPageviewsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29810,10 +37464,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceVitally" + operationId: "putSourceWikipediaPageviews" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vitally#update + x-speakeasy-entity-operation: Source_WikipediaPageviews#update delete: tags: - "Sources" @@ -29824,10 +37478,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceVitally" + operationId: "deleteSourceWikipediaPageviews" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vitally#delete + x-speakeasy-entity-operation: Source_WikipediaPageviews#delete parameters: - name: "sourceId" schema: @@ -29835,13 +37489,13 @@ paths: type: "string" in: "path" required: true - /sources#Vwo: + /sources#Woocommerce: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceVwoCreateRequest" + $ref: "#/components/schemas/SourceWoocommerceCreateRequest" tags: - "Sources" responses: @@ -29855,14 +37509,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceVwo" + operationId: "createSourceWoocommerce" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vwo#create - /sources/{sourceId}#Vwo: + x-speakeasy-entity-operation: Source_Woocommerce#create + /sources/{sourceId}#Woocommerce: get: tags: - "Sources" @@ -29877,10 +37531,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceVwo" + operationId: "getSourceWoocommerce" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vwo#read + x-speakeasy-entity-operation: Source_Woocommerce#read put: tags: - "Sources" @@ -29888,7 +37542,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceVwoPutRequest" + $ref: "#/components/schemas/SourceWoocommercePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29896,10 +37550,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceVwo" + operationId: "putSourceWoocommerce" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vwo#update + x-speakeasy-entity-operation: Source_Woocommerce#update delete: tags: - "Sources" @@ -29910,10 +37564,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceVwo" + operationId: "deleteSourceWoocommerce" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Vwo#delete + x-speakeasy-entity-operation: Source_Woocommerce#delete parameters: - name: "sourceId" schema: @@ -29921,13 +37575,13 @@ paths: type: "string" in: "path" required: true - /sources#Waiteraid: + /sources#Wordpress: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWaiteraidCreateRequest" + $ref: "#/components/schemas/SourceWordpressCreateRequest" tags: - "Sources" responses: @@ -29941,14 +37595,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWaiteraid" + operationId: "createSourceWordpress" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Waiteraid#create - /sources/{sourceId}#Waiteraid: + x-speakeasy-entity-operation: Source_Wordpress#create + /sources/{sourceId}#Wordpress: get: tags: - "Sources" @@ -29963,10 +37617,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWaiteraid" + operationId: "getSourceWordpress" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Waiteraid#read + x-speakeasy-entity-operation: Source_Wordpress#read put: tags: - "Sources" @@ -29974,7 +37628,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWaiteraidPutRequest" + $ref: "#/components/schemas/SourceWordpressPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -29982,10 +37636,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWaiteraid" + operationId: "putSourceWordpress" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Waiteraid#update + x-speakeasy-entity-operation: Source_Wordpress#update delete: tags: - "Sources" @@ -29996,10 +37650,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWaiteraid" + operationId: "deleteSourceWordpress" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Waiteraid#delete + x-speakeasy-entity-operation: Source_Wordpress#delete parameters: - name: "sourceId" schema: @@ -30007,13 +37661,13 @@ paths: type: "string" in: "path" required: true - /sources#WasabiStatsApi: + /sources#Workable: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWasabiStatsApiCreateRequest" + $ref: "#/components/schemas/SourceWorkableCreateRequest" tags: - "Sources" responses: @@ -30027,14 +37681,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWasabiStatsApi" + operationId: "createSourceWorkable" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WasabiStatsApi#create - /sources/{sourceId}#WasabiStatsApi: + x-speakeasy-entity-operation: Source_Workable#create + /sources/{sourceId}#Workable: get: tags: - "Sources" @@ -30049,10 +37703,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWasabiStatsApi" + operationId: "getSourceWorkable" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WasabiStatsApi#read + x-speakeasy-entity-operation: Source_Workable#read put: tags: - "Sources" @@ -30060,7 +37714,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWasabiStatsApiPutRequest" + $ref: "#/components/schemas/SourceWorkablePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30068,10 +37722,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWasabiStatsApi" + operationId: "putSourceWorkable" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WasabiStatsApi#update + x-speakeasy-entity-operation: Source_Workable#update delete: tags: - "Sources" @@ -30082,10 +37736,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWasabiStatsApi" + operationId: "deleteSourceWorkable" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WasabiStatsApi#delete + x-speakeasy-entity-operation: Source_Workable#delete parameters: - name: "sourceId" schema: @@ -30093,13 +37747,13 @@ paths: type: "string" in: "path" required: true - /sources#Weatherstack: + /sources#Workflowmax: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWeatherstackCreateRequest" + $ref: "#/components/schemas/SourceWorkflowmaxCreateRequest" tags: - "Sources" responses: @@ -30113,14 +37767,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWeatherstack" + operationId: "createSourceWorkflowmax" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Weatherstack#create - /sources/{sourceId}#Weatherstack: + x-speakeasy-entity-operation: Source_Workflowmax#create + /sources/{sourceId}#Workflowmax: get: tags: - "Sources" @@ -30135,10 +37789,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWeatherstack" + operationId: "getSourceWorkflowmax" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Weatherstack#read + x-speakeasy-entity-operation: Source_Workflowmax#read put: tags: - "Sources" @@ -30146,7 +37800,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWeatherstackPutRequest" + $ref: "#/components/schemas/SourceWorkflowmaxPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30154,10 +37808,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWeatherstack" + operationId: "putSourceWorkflowmax" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Weatherstack#update + x-speakeasy-entity-operation: Source_Workflowmax#update delete: tags: - "Sources" @@ -30168,10 +37822,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWeatherstack" + operationId: "deleteSourceWorkflowmax" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Weatherstack#delete + x-speakeasy-entity-operation: Source_Workflowmax#delete parameters: - name: "sourceId" schema: @@ -30179,13 +37833,13 @@ paths: type: "string" in: "path" required: true - /sources#Webflow: + /sources#Workramp: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWebflowCreateRequest" + $ref: "#/components/schemas/SourceWorkrampCreateRequest" tags: - "Sources" responses: @@ -30199,14 +37853,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWebflow" + operationId: "createSourceWorkramp" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Webflow#create - /sources/{sourceId}#Webflow: + x-speakeasy-entity-operation: Source_Workramp#create + /sources/{sourceId}#Workramp: get: tags: - "Sources" @@ -30221,10 +37875,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWebflow" + operationId: "getSourceWorkramp" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Webflow#read + x-speakeasy-entity-operation: Source_Workramp#read put: tags: - "Sources" @@ -30232,7 +37886,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWebflowPutRequest" + $ref: "#/components/schemas/SourceWorkrampPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30240,10 +37894,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWebflow" + operationId: "putSourceWorkramp" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Webflow#update + x-speakeasy-entity-operation: Source_Workramp#update delete: tags: - "Sources" @@ -30254,10 +37908,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWebflow" + operationId: "deleteSourceWorkramp" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Webflow#delete + x-speakeasy-entity-operation: Source_Workramp#delete parameters: - name: "sourceId" schema: @@ -30265,13 +37919,13 @@ paths: type: "string" in: "path" required: true - /sources#WhenIWork: + /sources#Wrike: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWhenIWorkCreateRequest" + $ref: "#/components/schemas/SourceWrikeCreateRequest" tags: - "Sources" responses: @@ -30285,14 +37939,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWhenIWork" + operationId: "createSourceWrike" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WhenIWork#create - /sources/{sourceId}#WhenIWork: + x-speakeasy-entity-operation: Source_Wrike#create + /sources/{sourceId}#Wrike: get: tags: - "Sources" @@ -30307,10 +37961,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWhenIWork" + operationId: "getSourceWrike" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WhenIWork#read + x-speakeasy-entity-operation: Source_Wrike#read put: tags: - "Sources" @@ -30318,7 +37972,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWhenIWorkPutRequest" + $ref: "#/components/schemas/SourceWrikePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30326,10 +37980,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWhenIWork" + operationId: "putSourceWrike" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WhenIWork#update + x-speakeasy-entity-operation: Source_Wrike#update delete: tags: - "Sources" @@ -30340,10 +37994,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWhenIWork" + operationId: "deleteSourceWrike" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WhenIWork#delete + x-speakeasy-entity-operation: Source_Wrike#delete parameters: - name: "sourceId" schema: @@ -30351,13 +38005,13 @@ paths: type: "string" in: "path" required: true - /sources#WhiskyHunter: + /sources#Wufoo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWhiskyHunterCreateRequest" + $ref: "#/components/schemas/SourceWufooCreateRequest" tags: - "Sources" responses: @@ -30371,14 +38025,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWhiskyHunter" + operationId: "createSourceWufoo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WhiskyHunter#create - /sources/{sourceId}#WhiskyHunter: + x-speakeasy-entity-operation: Source_Wufoo#create + /sources/{sourceId}#Wufoo: get: tags: - "Sources" @@ -30393,10 +38047,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWhiskyHunter" + operationId: "getSourceWufoo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WhiskyHunter#read + x-speakeasy-entity-operation: Source_Wufoo#read put: tags: - "Sources" @@ -30404,7 +38058,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWhiskyHunterPutRequest" + $ref: "#/components/schemas/SourceWufooPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30412,10 +38066,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWhiskyHunter" + operationId: "putSourceWufoo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WhiskyHunter#update + x-speakeasy-entity-operation: Source_Wufoo#update delete: tags: - "Sources" @@ -30426,10 +38080,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWhiskyHunter" + operationId: "deleteSourceWufoo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WhiskyHunter#delete + x-speakeasy-entity-operation: Source_Wufoo#delete parameters: - name: "sourceId" schema: @@ -30437,13 +38091,13 @@ paths: type: "string" in: "path" required: true - /sources#WikipediaPageviews: + /sources#Xkcd: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWikipediaPageviewsCreateRequest" + $ref: "#/components/schemas/SourceXkcdCreateRequest" tags: - "Sources" responses: @@ -30457,14 +38111,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWikipediaPageviews" + operationId: "createSourceXkcd" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WikipediaPageviews#create - /sources/{sourceId}#WikipediaPageviews: + x-speakeasy-entity-operation: Source_Xkcd#create + /sources/{sourceId}#Xkcd: get: tags: - "Sources" @@ -30479,10 +38133,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWikipediaPageviews" + operationId: "getSourceXkcd" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WikipediaPageviews#read + x-speakeasy-entity-operation: Source_Xkcd#read put: tags: - "Sources" @@ -30490,7 +38144,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWikipediaPageviewsPutRequest" + $ref: "#/components/schemas/SourceXkcdPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30498,10 +38152,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWikipediaPageviews" + operationId: "putSourceXkcd" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WikipediaPageviews#update + x-speakeasy-entity-operation: Source_Xkcd#update delete: tags: - "Sources" @@ -30512,10 +38166,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWikipediaPageviews" + operationId: "deleteSourceXkcd" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_WikipediaPageviews#delete + x-speakeasy-entity-operation: Source_Xkcd#delete parameters: - name: "sourceId" schema: @@ -30523,13 +38177,13 @@ paths: type: "string" in: "path" required: true - /sources#Woocommerce: + /sources#Xsolla: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWoocommerceCreateRequest" + $ref: "#/components/schemas/SourceXsollaCreateRequest" tags: - "Sources" responses: @@ -30543,14 +38197,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWoocommerce" + operationId: "createSourceXsolla" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Woocommerce#create - /sources/{sourceId}#Woocommerce: + x-speakeasy-entity-operation: Source_Xsolla#create + /sources/{sourceId}#Xsolla: get: tags: - "Sources" @@ -30565,10 +38219,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWoocommerce" + operationId: "getSourceXsolla" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Woocommerce#read + x-speakeasy-entity-operation: Source_Xsolla#read put: tags: - "Sources" @@ -30576,7 +38230,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWoocommercePutRequest" + $ref: "#/components/schemas/SourceXsollaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30584,10 +38238,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWoocommerce" + operationId: "putSourceXsolla" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Woocommerce#update + x-speakeasy-entity-operation: Source_Xsolla#update delete: tags: - "Sources" @@ -30598,10 +38252,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWoocommerce" + operationId: "deleteSourceXsolla" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Woocommerce#delete + x-speakeasy-entity-operation: Source_Xsolla#delete parameters: - name: "sourceId" schema: @@ -30609,13 +38263,13 @@ paths: type: "string" in: "path" required: true - /sources#Wordpress: + /sources#YahooFinancePrice: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWordpressCreateRequest" + $ref: "#/components/schemas/SourceYahooFinancePriceCreateRequest" tags: - "Sources" responses: @@ -30629,14 +38283,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWordpress" + operationId: "createSourceYahooFinancePrice" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Wordpress#create - /sources/{sourceId}#Wordpress: + x-speakeasy-entity-operation: Source_YahooFinancePrice#create + /sources/{sourceId}#YahooFinancePrice: get: tags: - "Sources" @@ -30651,10 +38305,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWordpress" + operationId: "getSourceYahooFinancePrice" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Wordpress#read + x-speakeasy-entity-operation: Source_YahooFinancePrice#read put: tags: - "Sources" @@ -30662,7 +38316,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWordpressPutRequest" + $ref: "#/components/schemas/SourceYahooFinancePricePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30670,10 +38324,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWordpress" + operationId: "putSourceYahooFinancePrice" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Wordpress#update + x-speakeasy-entity-operation: Source_YahooFinancePrice#update delete: tags: - "Sources" @@ -30684,10 +38338,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWordpress" + operationId: "deleteSourceYahooFinancePrice" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Wordpress#delete + x-speakeasy-entity-operation: Source_YahooFinancePrice#delete parameters: - name: "sourceId" schema: @@ -30695,13 +38349,13 @@ paths: type: "string" in: "path" required: true - /sources#Workable: + /sources#YandexMetrica: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWorkableCreateRequest" + $ref: "#/components/schemas/SourceYandexMetricaCreateRequest" tags: - "Sources" responses: @@ -30715,14 +38369,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWorkable" + operationId: "createSourceYandexMetrica" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workable#create - /sources/{sourceId}#Workable: + x-speakeasy-entity-operation: Source_YandexMetrica#create + /sources/{sourceId}#YandexMetrica: get: tags: - "Sources" @@ -30737,10 +38391,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWorkable" + operationId: "getSourceYandexMetrica" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workable#read + x-speakeasy-entity-operation: Source_YandexMetrica#read put: tags: - "Sources" @@ -30748,7 +38402,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWorkablePutRequest" + $ref: "#/components/schemas/SourceYandexMetricaPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30756,10 +38410,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWorkable" + operationId: "putSourceYandexMetrica" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workable#update + x-speakeasy-entity-operation: Source_YandexMetrica#update delete: tags: - "Sources" @@ -30770,10 +38424,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWorkable" + operationId: "deleteSourceYandexMetrica" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workable#delete + x-speakeasy-entity-operation: Source_YandexMetrica#delete parameters: - name: "sourceId" schema: @@ -30781,13 +38435,13 @@ paths: type: "string" in: "path" required: true - /sources#Workflowmax: + /sources#Yotpo: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWorkflowmaxCreateRequest" + $ref: "#/components/schemas/SourceYotpoCreateRequest" tags: - "Sources" responses: @@ -30801,14 +38455,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWorkflowmax" + operationId: "createSourceYotpo" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workflowmax#create - /sources/{sourceId}#Workflowmax: + x-speakeasy-entity-operation: Source_Yotpo#create + /sources/{sourceId}#Yotpo: get: tags: - "Sources" @@ -30823,10 +38477,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWorkflowmax" + operationId: "getSourceYotpo" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workflowmax#read + x-speakeasy-entity-operation: Source_Yotpo#read put: tags: - "Sources" @@ -30834,7 +38488,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWorkflowmaxPutRequest" + $ref: "#/components/schemas/SourceYotpoPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30842,10 +38496,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWorkflowmax" + operationId: "putSourceYotpo" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workflowmax#update + x-speakeasy-entity-operation: Source_Yotpo#update delete: tags: - "Sources" @@ -30856,10 +38510,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWorkflowmax" + operationId: "deleteSourceYotpo" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workflowmax#delete + x-speakeasy-entity-operation: Source_Yotpo#delete parameters: - name: "sourceId" schema: @@ -30867,13 +38521,13 @@ paths: type: "string" in: "path" required: true - /sources#Workramp: + /sources#YouNeedABudgetYnab: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWorkrampCreateRequest" + $ref: "#/components/schemas/SourceYouNeedABudgetYnabCreateRequest" tags: - "Sources" responses: @@ -30887,14 +38541,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWorkramp" + operationId: "createSourceYouNeedABudgetYnab" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workramp#create - /sources/{sourceId}#Workramp: + x-speakeasy-entity-operation: Source_YouNeedABudgetYnab#create + /sources/{sourceId}#YouNeedABudgetYnab: get: tags: - "Sources" @@ -30909,10 +38563,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWorkramp" + operationId: "getSourceYouNeedABudgetYnab" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workramp#read + x-speakeasy-entity-operation: Source_YouNeedABudgetYnab#read put: tags: - "Sources" @@ -30920,7 +38574,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWorkrampPutRequest" + $ref: "#/components/schemas/SourceYouNeedABudgetYnabPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -30928,10 +38582,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWorkramp" + operationId: "putSourceYouNeedABudgetYnab" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workramp#update + x-speakeasy-entity-operation: Source_YouNeedABudgetYnab#update delete: tags: - "Sources" @@ -30942,10 +38596,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWorkramp" + operationId: "deleteSourceYouNeedABudgetYnab" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Workramp#delete + x-speakeasy-entity-operation: Source_YouNeedABudgetYnab#delete parameters: - name: "sourceId" schema: @@ -30953,13 +38607,13 @@ paths: type: "string" in: "path" required: true - /sources#Wrike: + /sources#YoutubeAnalytics: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceWrikeCreateRequest" + $ref: "#/components/schemas/SourceYoutubeAnalyticsCreateRequest" tags: - "Sources" responses: @@ -30973,14 +38627,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceWrike" + operationId: "createSourceYoutubeAnalytics" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Wrike#create - /sources/{sourceId}#Wrike: + x-speakeasy-entity-operation: Source_YoutubeAnalytics#create + /sources/{sourceId}#YoutubeAnalytics: get: tags: - "Sources" @@ -30995,10 +38649,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceWrike" + operationId: "getSourceYoutubeAnalytics" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Wrike#read + x-speakeasy-entity-operation: Source_YoutubeAnalytics#read put: tags: - "Sources" @@ -31006,7 +38660,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceWrikePutRequest" + $ref: "#/components/schemas/SourceYoutubeAnalyticsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31014,10 +38668,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceWrike" + operationId: "putSourceYoutubeAnalytics" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Wrike#update + x-speakeasy-entity-operation: Source_YoutubeAnalytics#update delete: tags: - "Sources" @@ -31028,10 +38682,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceWrike" + operationId: "deleteSourceYoutubeAnalytics" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Wrike#delete + x-speakeasy-entity-operation: Source_YoutubeAnalytics#delete parameters: - name: "sourceId" schema: @@ -31039,13 +38693,13 @@ paths: type: "string" in: "path" required: true - /sources#Xkcd: + /sources#YoutubeData: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceXkcdCreateRequest" + $ref: "#/components/schemas/SourceYoutubeDataCreateRequest" tags: - "Sources" responses: @@ -31059,14 +38713,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceXkcd" + operationId: "createSourceYoutubeData" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Xkcd#create - /sources/{sourceId}#Xkcd: + x-speakeasy-entity-operation: Source_YoutubeData#create + /sources/{sourceId}#YoutubeData: get: tags: - "Sources" @@ -31081,10 +38735,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceXkcd" + operationId: "getSourceYoutubeData" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Xkcd#read + x-speakeasy-entity-operation: Source_YoutubeData#read put: tags: - "Sources" @@ -31092,7 +38746,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceXkcdPutRequest" + $ref: "#/components/schemas/SourceYoutubeDataPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31100,10 +38754,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceXkcd" + operationId: "putSourceYoutubeData" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Xkcd#update + x-speakeasy-entity-operation: Source_YoutubeData#update delete: tags: - "Sources" @@ -31114,10 +38768,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceXkcd" + operationId: "deleteSourceYoutubeData" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Xkcd#delete + x-speakeasy-entity-operation: Source_YoutubeData#delete parameters: - name: "sourceId" schema: @@ -31125,13 +38779,13 @@ paths: type: "string" in: "path" required: true - /sources#Xsolla: + /sources#ZapierSupportedStorage: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceXsollaCreateRequest" + $ref: "#/components/schemas/SourceZapierSupportedStorageCreateRequest" tags: - "Sources" responses: @@ -31145,14 +38799,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceXsolla" + operationId: "createSourceZapierSupportedStorage" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Xsolla#create - /sources/{sourceId}#Xsolla: + x-speakeasy-entity-operation: Source_ZapierSupportedStorage#create + /sources/{sourceId}#ZapierSupportedStorage: get: tags: - "Sources" @@ -31167,10 +38821,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceXsolla" + operationId: "getSourceZapierSupportedStorage" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Xsolla#read + x-speakeasy-entity-operation: Source_ZapierSupportedStorage#read put: tags: - "Sources" @@ -31178,7 +38832,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceXsollaPutRequest" + $ref: "#/components/schemas/SourceZapierSupportedStoragePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31186,10 +38840,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceXsolla" + operationId: "putSourceZapierSupportedStorage" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Xsolla#update + x-speakeasy-entity-operation: Source_ZapierSupportedStorage#update delete: tags: - "Sources" @@ -31200,10 +38854,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceXsolla" + operationId: "deleteSourceZapierSupportedStorage" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Xsolla#delete + x-speakeasy-entity-operation: Source_ZapierSupportedStorage#delete parameters: - name: "sourceId" schema: @@ -31211,13 +38865,13 @@ paths: type: "string" in: "path" required: true - /sources#YahooFinancePrice: + /sources#ZendeskChat: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceYahooFinancePriceCreateRequest" + $ref: "#/components/schemas/SourceZendeskChatCreateRequest" tags: - "Sources" responses: @@ -31231,14 +38885,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceYahooFinancePrice" + operationId: "createSourceZendeskChat" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YahooFinancePrice#create - /sources/{sourceId}#YahooFinancePrice: + x-speakeasy-entity-operation: Source_ZendeskChat#create + /sources/{sourceId}#ZendeskChat: get: tags: - "Sources" @@ -31253,10 +38907,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceYahooFinancePrice" + operationId: "getSourceZendeskChat" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YahooFinancePrice#read + x-speakeasy-entity-operation: Source_ZendeskChat#read put: tags: - "Sources" @@ -31264,7 +38918,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceYahooFinancePricePutRequest" + $ref: "#/components/schemas/SourceZendeskChatPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31272,10 +38926,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceYahooFinancePrice" + operationId: "putSourceZendeskChat" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YahooFinancePrice#update + x-speakeasy-entity-operation: Source_ZendeskChat#update delete: tags: - "Sources" @@ -31286,10 +38940,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceYahooFinancePrice" + operationId: "deleteSourceZendeskChat" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YahooFinancePrice#delete + x-speakeasy-entity-operation: Source_ZendeskChat#delete parameters: - name: "sourceId" schema: @@ -31297,13 +38951,13 @@ paths: type: "string" in: "path" required: true - /sources#YandexMetrica: + /sources#ZendeskSunshine: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceYandexMetricaCreateRequest" + $ref: "#/components/schemas/SourceZendeskSunshineCreateRequest" tags: - "Sources" responses: @@ -31317,14 +38971,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceYandexMetrica" + operationId: "createSourceZendeskSunshine" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YandexMetrica#create - /sources/{sourceId}#YandexMetrica: + x-speakeasy-entity-operation: Source_ZendeskSunshine#create + /sources/{sourceId}#ZendeskSunshine: get: tags: - "Sources" @@ -31339,10 +38993,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceYandexMetrica" + operationId: "getSourceZendeskSunshine" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YandexMetrica#read + x-speakeasy-entity-operation: Source_ZendeskSunshine#read put: tags: - "Sources" @@ -31350,7 +39004,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceYandexMetricaPutRequest" + $ref: "#/components/schemas/SourceZendeskSunshinePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31358,10 +39012,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceYandexMetrica" + operationId: "putSourceZendeskSunshine" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YandexMetrica#update + x-speakeasy-entity-operation: Source_ZendeskSunshine#update delete: tags: - "Sources" @@ -31372,10 +39026,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceYandexMetrica" + operationId: "deleteSourceZendeskSunshine" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YandexMetrica#delete + x-speakeasy-entity-operation: Source_ZendeskSunshine#delete parameters: - name: "sourceId" schema: @@ -31383,13 +39037,13 @@ paths: type: "string" in: "path" required: true - /sources#Yotpo: + /sources#ZendeskSupport: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceYotpoCreateRequest" + $ref: "#/components/schemas/SourceZendeskSupportCreateRequest" tags: - "Sources" responses: @@ -31403,14 +39057,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceYotpo" + operationId: "createSourceZendeskSupport" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Yotpo#create - /sources/{sourceId}#Yotpo: + x-speakeasy-entity-operation: Source_ZendeskSupport#create + /sources/{sourceId}#ZendeskSupport: get: tags: - "Sources" @@ -31425,10 +39079,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceYotpo" + operationId: "getSourceZendeskSupport" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Yotpo#read + x-speakeasy-entity-operation: Source_ZendeskSupport#read put: tags: - "Sources" @@ -31436,7 +39090,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceYotpoPutRequest" + $ref: "#/components/schemas/SourceZendeskSupportPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31444,10 +39098,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceYotpo" + operationId: "putSourceZendeskSupport" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Yotpo#update + x-speakeasy-entity-operation: Source_ZendeskSupport#update delete: tags: - "Sources" @@ -31458,10 +39112,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceYotpo" + operationId: "deleteSourceZendeskSupport" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Yotpo#delete + x-speakeasy-entity-operation: Source_ZendeskSupport#delete parameters: - name: "sourceId" schema: @@ -31469,13 +39123,13 @@ paths: type: "string" in: "path" required: true - /sources#YouNeedABudgetYnab: + /sources#ZendeskTalk: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceYouNeedABudgetYnabCreateRequest" + $ref: "#/components/schemas/SourceZendeskTalkCreateRequest" tags: - "Sources" responses: @@ -31489,14 +39143,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceYouNeedABudgetYnab" + operationId: "createSourceZendeskTalk" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YouNeedABudgetYnab#create - /sources/{sourceId}#YouNeedABudgetYnab: + x-speakeasy-entity-operation: Source_ZendeskTalk#create + /sources/{sourceId}#ZendeskTalk: get: tags: - "Sources" @@ -31511,10 +39165,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceYouNeedABudgetYnab" + operationId: "getSourceZendeskTalk" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YouNeedABudgetYnab#read + x-speakeasy-entity-operation: Source_ZendeskTalk#read put: tags: - "Sources" @@ -31522,7 +39176,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceYouNeedABudgetYnabPutRequest" + $ref: "#/components/schemas/SourceZendeskTalkPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31530,10 +39184,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceYouNeedABudgetYnab" + operationId: "putSourceZendeskTalk" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YouNeedABudgetYnab#update + x-speakeasy-entity-operation: Source_ZendeskTalk#update delete: tags: - "Sources" @@ -31544,10 +39198,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceYouNeedABudgetYnab" + operationId: "deleteSourceZendeskTalk" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YouNeedABudgetYnab#delete + x-speakeasy-entity-operation: Source_ZendeskTalk#delete parameters: - name: "sourceId" schema: @@ -31555,13 +39209,13 @@ paths: type: "string" in: "path" required: true - /sources#YoutubeAnalytics: + /sources#Zenefits: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceYoutubeAnalyticsCreateRequest" + $ref: "#/components/schemas/SourceZenefitsCreateRequest" tags: - "Sources" responses: @@ -31575,14 +39229,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceYoutubeAnalytics" + operationId: "createSourceZenefits" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YoutubeAnalytics#create - /sources/{sourceId}#YoutubeAnalytics: + x-speakeasy-entity-operation: Source_Zenefits#create + /sources/{sourceId}#Zenefits: get: tags: - "Sources" @@ -31597,10 +39251,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceYoutubeAnalytics" + operationId: "getSourceZenefits" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YoutubeAnalytics#read + x-speakeasy-entity-operation: Source_Zenefits#read put: tags: - "Sources" @@ -31608,7 +39262,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceYoutubeAnalyticsPutRequest" + $ref: "#/components/schemas/SourceZenefitsPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31616,10 +39270,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceYoutubeAnalytics" + operationId: "putSourceZenefits" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YoutubeAnalytics#update + x-speakeasy-entity-operation: Source_Zenefits#update delete: tags: - "Sources" @@ -31630,10 +39284,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceYoutubeAnalytics" + operationId: "deleteSourceZenefits" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_YoutubeAnalytics#delete + x-speakeasy-entity-operation: Source_Zenefits#delete parameters: - name: "sourceId" schema: @@ -31641,13 +39295,13 @@ paths: type: "string" in: "path" required: true - /sources#ZapierSupportedStorage: + /sources#Zenloop: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZapierSupportedStorageCreateRequest" + $ref: "#/components/schemas/SourceZenloopCreateRequest" tags: - "Sources" responses: @@ -31661,14 +39315,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZapierSupportedStorage" + operationId: "createSourceZenloop" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZapierSupportedStorage#create - /sources/{sourceId}#ZapierSupportedStorage: + x-speakeasy-entity-operation: Source_Zenloop#create + /sources/{sourceId}#Zenloop: get: tags: - "Sources" @@ -31683,10 +39337,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZapierSupportedStorage" + operationId: "getSourceZenloop" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZapierSupportedStorage#read + x-speakeasy-entity-operation: Source_Zenloop#read put: tags: - "Sources" @@ -31694,7 +39348,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZapierSupportedStoragePutRequest" + $ref: "#/components/schemas/SourceZenloopPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31702,10 +39356,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZapierSupportedStorage" + operationId: "putSourceZenloop" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZapierSupportedStorage#update + x-speakeasy-entity-operation: Source_Zenloop#update delete: tags: - "Sources" @@ -31716,10 +39370,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZapierSupportedStorage" + operationId: "deleteSourceZenloop" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZapierSupportedStorage#delete + x-speakeasy-entity-operation: Source_Zenloop#delete parameters: - name: "sourceId" schema: @@ -31727,13 +39381,13 @@ paths: type: "string" in: "path" required: true - /sources#ZendeskChat: + /sources#ZohoAnalyticsMetadataApi: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZendeskChatCreateRequest" + $ref: "#/components/schemas/SourceZohoAnalyticsMetadataApiCreateRequest" tags: - "Sources" responses: @@ -31747,14 +39401,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZendeskChat" + operationId: "createSourceZohoAnalyticsMetadataApi" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskChat#create - /sources/{sourceId}#ZendeskChat: + x-speakeasy-entity-operation: Source_ZohoAnalyticsMetadataApi#create + /sources/{sourceId}#ZohoAnalyticsMetadataApi: get: tags: - "Sources" @@ -31769,10 +39423,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZendeskChat" + operationId: "getSourceZohoAnalyticsMetadataApi" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskChat#read + x-speakeasy-entity-operation: Source_ZohoAnalyticsMetadataApi#read put: tags: - "Sources" @@ -31780,7 +39434,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZendeskChatPutRequest" + $ref: "#/components/schemas/SourceZohoAnalyticsMetadataApiPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31788,10 +39442,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZendeskChat" + operationId: "putSourceZohoAnalyticsMetadataApi" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskChat#update + x-speakeasy-entity-operation: Source_ZohoAnalyticsMetadataApi#update delete: tags: - "Sources" @@ -31802,10 +39456,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZendeskChat" + operationId: "deleteSourceZohoAnalyticsMetadataApi" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskChat#delete + x-speakeasy-entity-operation: Source_ZohoAnalyticsMetadataApi#delete parameters: - name: "sourceId" schema: @@ -31813,13 +39467,13 @@ paths: type: "string" in: "path" required: true - /sources#ZendeskSunshine: + /sources#ZohoBigin: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZendeskSunshineCreateRequest" + $ref: "#/components/schemas/SourceZohoBiginCreateRequest" tags: - "Sources" responses: @@ -31833,14 +39487,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZendeskSunshine" + operationId: "createSourceZohoBigin" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskSunshine#create - /sources/{sourceId}#ZendeskSunshine: + x-speakeasy-entity-operation: Source_ZohoBigin#create + /sources/{sourceId}#ZohoBigin: get: tags: - "Sources" @@ -31855,10 +39509,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZendeskSunshine" + operationId: "getSourceZohoBigin" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskSunshine#read + x-speakeasy-entity-operation: Source_ZohoBigin#read put: tags: - "Sources" @@ -31866,7 +39520,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZendeskSunshinePutRequest" + $ref: "#/components/schemas/SourceZohoBiginPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31874,10 +39528,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZendeskSunshine" + operationId: "putSourceZohoBigin" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskSunshine#update + x-speakeasy-entity-operation: Source_ZohoBigin#update delete: tags: - "Sources" @@ -31888,10 +39542,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZendeskSunshine" + operationId: "deleteSourceZohoBigin" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskSunshine#delete + x-speakeasy-entity-operation: Source_ZohoBigin#delete parameters: - name: "sourceId" schema: @@ -31899,13 +39553,13 @@ paths: type: "string" in: "path" required: true - /sources#ZendeskSupport: + /sources#ZohoBilling: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZendeskSupportCreateRequest" + $ref: "#/components/schemas/SourceZohoBillingCreateRequest" tags: - "Sources" responses: @@ -31919,14 +39573,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZendeskSupport" + operationId: "createSourceZohoBilling" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskSupport#create - /sources/{sourceId}#ZendeskSupport: + x-speakeasy-entity-operation: Source_ZohoBilling#create + /sources/{sourceId}#ZohoBilling: get: tags: - "Sources" @@ -31941,10 +39595,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZendeskSupport" + operationId: "getSourceZohoBilling" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskSupport#read + x-speakeasy-entity-operation: Source_ZohoBilling#read put: tags: - "Sources" @@ -31952,7 +39606,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZendeskSupportPutRequest" + $ref: "#/components/schemas/SourceZohoBillingPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -31960,10 +39614,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZendeskSupport" + operationId: "putSourceZohoBilling" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskSupport#update + x-speakeasy-entity-operation: Source_ZohoBilling#update delete: tags: - "Sources" @@ -31974,10 +39628,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZendeskSupport" + operationId: "deleteSourceZohoBilling" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskSupport#delete + x-speakeasy-entity-operation: Source_ZohoBilling#delete parameters: - name: "sourceId" schema: @@ -31985,13 +39639,13 @@ paths: type: "string" in: "path" required: true - /sources#ZendeskTalk: + /sources#ZohoBooks: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZendeskTalkCreateRequest" + $ref: "#/components/schemas/SourceZohoBooksCreateRequest" tags: - "Sources" responses: @@ -32005,14 +39659,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZendeskTalk" + operationId: "createSourceZohoBooks" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskTalk#create - /sources/{sourceId}#ZendeskTalk: + x-speakeasy-entity-operation: Source_ZohoBooks#create + /sources/{sourceId}#ZohoBooks: get: tags: - "Sources" @@ -32027,10 +39681,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZendeskTalk" + operationId: "getSourceZohoBooks" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskTalk#read + x-speakeasy-entity-operation: Source_ZohoBooks#read put: tags: - "Sources" @@ -32038,7 +39692,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZendeskTalkPutRequest" + $ref: "#/components/schemas/SourceZohoBooksPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -32046,10 +39700,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZendeskTalk" + operationId: "putSourceZohoBooks" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskTalk#update + x-speakeasy-entity-operation: Source_ZohoBooks#update delete: tags: - "Sources" @@ -32060,10 +39714,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZendeskTalk" + operationId: "deleteSourceZohoBooks" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZendeskTalk#delete + x-speakeasy-entity-operation: Source_ZohoBooks#delete parameters: - name: "sourceId" schema: @@ -32071,13 +39725,13 @@ paths: type: "string" in: "path" required: true - /sources#Zenefits: + /sources#ZohoCampaign: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZenefitsCreateRequest" + $ref: "#/components/schemas/SourceZohoCampaignCreateRequest" tags: - "Sources" responses: @@ -32091,14 +39745,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZenefits" + operationId: "createSourceZohoCampaign" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Zenefits#create - /sources/{sourceId}#Zenefits: + x-speakeasy-entity-operation: Source_ZohoCampaign#create + /sources/{sourceId}#ZohoCampaign: get: tags: - "Sources" @@ -32113,10 +39767,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZenefits" + operationId: "getSourceZohoCampaign" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Zenefits#read + x-speakeasy-entity-operation: Source_ZohoCampaign#read put: tags: - "Sources" @@ -32124,7 +39778,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZenefitsPutRequest" + $ref: "#/components/schemas/SourceZohoCampaignPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -32132,10 +39786,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZenefits" + operationId: "putSourceZohoCampaign" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Zenefits#update + x-speakeasy-entity-operation: Source_ZohoCampaign#update delete: tags: - "Sources" @@ -32146,10 +39800,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZenefits" + operationId: "deleteSourceZohoCampaign" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Zenefits#delete + x-speakeasy-entity-operation: Source_ZohoCampaign#delete parameters: - name: "sourceId" schema: @@ -32157,13 +39811,13 @@ paths: type: "string" in: "path" required: true - /sources#Zenloop: + /sources#ZohoCrm: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZenloopCreateRequest" + $ref: "#/components/schemas/SourceZohoCrmCreateRequest" tags: - "Sources" responses: @@ -32177,14 +39831,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZenloop" + operationId: "createSourceZohoCrm" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Zenloop#create - /sources/{sourceId}#Zenloop: + x-speakeasy-entity-operation: Source_ZohoCrm#create + /sources/{sourceId}#ZohoCrm: get: tags: - "Sources" @@ -32199,10 +39853,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZenloop" + operationId: "getSourceZohoCrm" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Zenloop#read + x-speakeasy-entity-operation: Source_ZohoCrm#read put: tags: - "Sources" @@ -32210,7 +39864,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZenloopPutRequest" + $ref: "#/components/schemas/SourceZohoCrmPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -32218,10 +39872,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZenloop" + operationId: "putSourceZohoCrm" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Zenloop#update + x-speakeasy-entity-operation: Source_ZohoCrm#update delete: tags: - "Sources" @@ -32232,10 +39886,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZenloop" + operationId: "deleteSourceZohoCrm" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Zenloop#delete + x-speakeasy-entity-operation: Source_ZohoCrm#delete parameters: - name: "sourceId" schema: @@ -32243,13 +39897,13 @@ paths: type: "string" in: "path" required: true - /sources#ZohoBooks: + /sources#ZohoDesk: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZohoBooksCreateRequest" + $ref: "#/components/schemas/SourceZohoDeskCreateRequest" tags: - "Sources" responses: @@ -32263,14 +39917,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZohoBooks" + operationId: "createSourceZohoDesk" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZohoBooks#create - /sources/{sourceId}#ZohoBooks: + x-speakeasy-entity-operation: Source_ZohoDesk#create + /sources/{sourceId}#ZohoDesk: get: tags: - "Sources" @@ -32285,10 +39939,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZohoBooks" + operationId: "getSourceZohoDesk" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZohoBooks#read + x-speakeasy-entity-operation: Source_ZohoDesk#read put: tags: - "Sources" @@ -32296,7 +39950,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZohoBooksPutRequest" + $ref: "#/components/schemas/SourceZohoDeskPutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -32304,10 +39958,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZohoBooks" + operationId: "putSourceZohoDesk" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZohoBooks#update + x-speakeasy-entity-operation: Source_ZohoDesk#update delete: tags: - "Sources" @@ -32318,10 +39972,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZohoBooks" + operationId: "deleteSourceZohoDesk" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZohoBooks#delete + x-speakeasy-entity-operation: Source_ZohoDesk#delete parameters: - name: "sourceId" schema: @@ -32329,13 +39983,13 @@ paths: type: "string" in: "path" required: true - /sources#ZohoCrm: + /sources#ZohoExpense: post: requestBody: content: application/json: schema: - $ref: "#/components/schemas/SourceZohoCrmCreateRequest" + $ref: "#/components/schemas/SourceZohoExpenseCreateRequest" tags: - "Sources" responses: @@ -32349,14 +40003,14 @@ paths: description: "Invalid data" "403": description: "Not allowed" - operationId: "createSourceZohoCrm" + operationId: "createSourceZohoExpense" summary: "Create a source" description: "Creates a source given a name, workspace id, and a json blob containing\ \ the configuration for the source." 
x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZohoCrm#create - /sources/{sourceId}#ZohoCrm: + x-speakeasy-entity-operation: Source_ZohoExpense#create + /sources/{sourceId}#ZohoExpense: get: tags: - "Sources" @@ -32371,10 +40025,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "getSourceZohoCrm" + operationId: "getSourceZohoExpense" summary: "Get Source details" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZohoCrm#read + x-speakeasy-entity-operation: Source_ZohoExpense#read put: tags: - "Sources" @@ -32382,7 +40036,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/SourceZohoCrmPutRequest" + $ref: "#/components/schemas/SourceZohoExpensePutRequest" responses: "2XX": description: "The resource was updated successfully" @@ -32390,10 +40044,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "putSourceZohoCrm" + operationId: "putSourceZohoExpense" summary: "Update a Source fully" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZohoCrm#update + x-speakeasy-entity-operation: Source_ZohoExpense#update delete: tags: - "Sources" @@ -32404,10 +40058,10 @@ paths: description: "Not allowed" "404": description: "Not found" - operationId: "deleteSourceZohoCrm" + operationId: "deleteSourceZohoExpense" summary: "Delete a Source" x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_ZohoCrm#delete + x-speakeasy-entity-operation: Source_ZohoExpense#delete parameters: - name: "sourceId" schema: @@ -32501,6 +40155,178 @@ paths: type: "string" in: "path" required: true + /sources#ZohoInvoice: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZohoInvoiceCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZohoInvoice" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZohoInvoice#create + /sources/{sourceId}#ZohoInvoice: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZohoInvoice" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZohoInvoice#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZohoInvoicePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZohoInvoice" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZohoInvoice#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZohoInvoice" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZohoInvoice#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#ZonkaFeedback: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZonkaFeedbackCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZonkaFeedback" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZonkaFeedback#create + /sources/{sourceId}#ZonkaFeedback: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZonkaFeedback" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZonkaFeedback#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZonkaFeedbackPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZonkaFeedback" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZonkaFeedback#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZonkaFeedback" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZonkaFeedback#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /sources#Zoom: post: requestBody: @@ -36291,6 +44117,7 @@ components: - "hashing" - "field-renaming" - "row-filtering" + - "encryption" x-speakeasy-component: true MapperConfiguration: type: "object" @@ -36711,6 +44538,7 @@ components: - "schedule" - "dataResidency" - "configurations" + - "createdAt" properties: connectionId: format: "UUID" @@ -36742,6 +44570,9 @@ components: type: "string" configurations: $ref: "#/components/schemas/StreamConfigurations" + createdAt: + format: "int64" + type: "integer" x-speakeasy-entity: Connection x-speakeasy-param-suppress-computed-diff: true x-speakeasy-component: true @@ -36859,6 +44690,7 @@ components: - "definitionId" - "workspaceId" - "configuration" + - "createdAt" properties: destinationId: format: "UUID" @@ -36875,6 +44707,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/DestinationConfiguration" + createdAt: + format: "int64" + type: "integer" example: destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -36893,6 +44728,7 @@ components: - "workspaceId" - "configuration" - "definitionId" + - "createdAt" properties: sourceId: format: "UUID" @@ -36909,6 +44745,9 @@ components: type: "string" configuration: $ref: "#/components/schemas/SourceConfiguration" + createdAt: + format: "int64" + type: "integer" example: sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" name: "Analytics Team Postgres" @@ -37445,6 +45284,71 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-google-forms: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "form_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + form_id: + type: "array" + order: 3 + title: "Form IDs" + sourceType: + title: "google-forms" + const: "google-forms" + enum: + - "google-forms" + order: 0 + type: "string" + source-google-forms-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "form_id" + properties: + client_id: + 
type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + form_id: + type: "array" + order: 3 + title: "Form IDs" source-the-guardian-api: title: "The Guardian Api Spec" type: "object" @@ -37843,6 +45747,65 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-google-classroom: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + title: "OAuth Client ID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + title: "OAuth Client Secret" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "google-classroom" + const: "google-classroom" + enum: + - "google-classroom" + order: 0 + type: "string" + source-google-classroom-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + properties: + client_id: + type: "string" + name: "client_id" + title: "OAuth Client ID" + airbyte_secret: true + order: 0 + client_secret: + type: "string" + name: "client_secret" + title: "OAuth Client Secret" + airbyte_secret: true + order: 1 + client_refresh_token: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 2 source-klaviyo: title: "Klaviyo Spec" type: "object" @@ -37879,6 +45842,23 @@ components: \ the \"predictive_analytics\" column from being populated in your downstream\ \ destination." order: 2 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 3 sourceType: title: "klaviyo" const: "klaviyo" @@ -37924,6 +45904,23 @@ components: \ the \"predictive_analytics\" column from being populated in your downstream\ \ destination." order: 2 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 3 required: - "api_key" source-datadog: @@ -38217,7 +46214,6 @@ components: title: "API Key" airbyte_secret: true source-quickbooks: - title: "Source QuickBooks Spec" type: "object" required: - "credentials" @@ -38226,9 +46222,7 @@ components: - "sourceType" properties: credentials: - title: "Authorization Method" type: "object" - order: 0 oneOf: - type: "object" title: "OAuth2.0" @@ -38240,6 +46234,14 @@ components: - "token_expiry_date" - "realm_id" properties: + realm_id: + type: "string" + title: "Realm ID" + description: + "Labeled Company ID. The Make API Calls panel is populated\ + \ with the realm id and the current access token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true auth_type: type: "string" const: "oauth2.0" @@ -38253,58 +46255,52 @@ components: \ value from the Keys tab on the app profile via My Apps on the\ \ developer site. There are two versions of this key: development\ \ and production." + access_token: + type: "string" + title: "Access Token" + description: "Access token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true client_secret: + type: "string" + title: "Client Secret" description: " Obtain this value from the Keys tab on the app profile\ \ via My Apps on the developer site. There are two versions of this\ \ key: development and production." - title: "Client Secret" - type: "string" airbyte_secret: true x-speakeasy-param-sensitive: true refresh_token: - description: "A token used when refreshing the access token." - title: "Refresh Token" - type: "string" - airbyte_secret: true - x-speakeasy-param-sensitive: true - access_token: - description: "Access token for making authenticated requests." - title: "Access Token" type: "string" + title: "Refresh Token" + description: "A token used when refreshing the access token." airbyte_secret: true x-speakeasy-param-sensitive: true token_expiry_date: type: "string" title: "Token Expiry Date" - description: "The date-time when the access token should be refreshed." format: "date-time" - realm_id: - description: - "Labeled Company ID. The Make API Calls panel is populated\ - \ with the realm id and the current access token." - title: "Realm ID" - type: "string" - airbyte_secret: true - x-speakeasy-param-sensitive: true + description: "The date-time when the access token should be refreshed." + order: 0 + title: "Authorization Method" start_date: + type: "string" order: 1 - description: - "The default value to use if no bookmark exists for an endpoint\ - \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ - \ date will not be replicated." title: "Start Date" - type: "string" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-03-20T00:00:00Z" + description: + "The default value to use if no bookmark exists for an endpoint\ + \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ + \ date will not be replicated." sandbox: + type: "boolean" order: 2 - description: "Determines whether to use the sandbox or production environment." title: "Sandbox" - type: "boolean" default: false + description: "Determines whether to use the sandbox or production environment." sourceType: title: "quickbooks" const: "quickbooks" @@ -38313,7 +46309,6 @@ components: order: 0 type: "string" source-quickbooks-update: - title: "Source QuickBooks Spec" type: "object" required: - "credentials" @@ -38321,9 +46316,7 @@ components: - "sandbox" properties: credentials: - title: "Authorization Method" type: "object" - order: 0 oneOf: - type: "object" title: "OAuth2.0" @@ -38335,6 +46328,13 @@ components: - "token_expiry_date" - "realm_id" properties: + realm_id: + type: "string" + title: "Realm ID" + description: + "Labeled Company ID. The Make API Calls panel is populated\ + \ with the realm id and the current access token." + airbyte_secret: true auth_type: type: "string" const: "oauth2.0" @@ -38348,54 +46348,94 @@ components: \ value from the Keys tab on the app profile via My Apps on the\ \ developer site. There are two versions of this key: development\ \ and production." 
+ access_token: + type: "string" + title: "Access Token" + description: "Access token for making authenticated requests." + airbyte_secret: true client_secret: + type: "string" + title: "Client Secret" description: " Obtain this value from the Keys tab on the app profile\ \ via My Apps on the developer site. There are two versions of this\ \ key: development and production." - title: "Client Secret" - type: "string" airbyte_secret: true refresh_token: - description: "A token used when refreshing the access token." - title: "Refresh Token" - type: "string" - airbyte_secret: true - access_token: - description: "Access token for making authenticated requests." - title: "Access Token" type: "string" + title: "Refresh Token" + description: "A token used when refreshing the access token." airbyte_secret: true token_expiry_date: type: "string" title: "Token Expiry Date" - description: "The date-time when the access token should be refreshed." format: "date-time" - realm_id: - description: - "Labeled Company ID. The Make API Calls panel is populated\ - \ with the realm id and the current access token." - title: "Realm ID" - type: "string" - airbyte_secret: true + description: "The date-time when the access token should be refreshed." + order: 0 + title: "Authorization Method" start_date: + type: "string" order: 1 - description: - "The default value to use if no bookmark exists for an endpoint\ - \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ - \ date will not be replicated." title: "Start Date" - type: "string" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-03-20T00:00:00Z" + description: + "The default value to use if no bookmark exists for an endpoint\ + \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ + \ date will not be replicated." sandbox: + type: "boolean" order: 2 - description: "Determines whether to use the sandbox or production environment." title: "Sandbox" - type: "boolean" default: false + description: "Determines whether to use the sandbox or production environment." 
+ source-flowlu: + type: "object" + required: + - "api_key" + - "company" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + company: + type: "string" + name: "company" + order: 1 + title: "Company" + sourceType: + title: "flowlu" + const: "flowlu" + enum: + - "flowlu" + order: 0 + type: "string" + source-flowlu-update: + type: "object" + required: + - "api_key" + - "company" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + company: + type: "string" + name: "company" + order: 1 + title: "Company" source-beamer: type: "object" required: @@ -38991,6 +47031,49 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-mailosaur: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + description: 'Enter "api" here' + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your api key here" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "mailosaur" + const: "mailosaur" + enum: + - "mailosaur" + order: 0 + type: "string" + source-mailosaur-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + description: 'Enter "api" here' + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your api key here" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true source-buildkite: type: "object" required: @@ -39695,6 +47778,77 @@ components: examples: - "https://api.kaon.kyve.network/" - "https://api.korellia.kyve.network/" + source-capsule-crm: + type: "object" + required: + - "bearer_token" + - "start_date" + - "entity" + - "sourceType" + properties: + bearer_token: + type: "string" + description: + "Bearer token to authenticate API requests. Generate it from\ + \ the 'My Preferences' > 'API Authentication Tokens' page in your Capsule\ + \ account." + name: "bearer_token" + order: 0 + title: "Bearer Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + entity: + type: "string" + title: "Entity" + enum: + - "parties" + - "opportunities" + - "kases" + order: 2 + sourceType: + title: "capsule-crm" + const: "capsule-crm" + enum: + - "capsule-crm" + order: 0 + type: "string" + source-capsule-crm-update: + type: "object" + required: + - "bearer_token" + - "start_date" + - "entity" + properties: + bearer_token: + type: "string" + description: + "Bearer token to authenticate API requests. Generate it from\ + \ the 'My Preferences' > 'API Authentication Tokens' page in your Capsule\ + \ account." 
+ name: "bearer_token" + order: 0 + title: "Bearer Token" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + entity: + type: "string" + title: "Entity" + enum: + - "parties" + - "opportunities" + - "kases" + order: 2 source-sigma-computing: type: "object" required: @@ -40105,6 +48259,48 @@ components: default: 100000 minimum: 15000 maximum: 200000 + source-shippo: + type: "object" + required: + - "shippo_token" + - "start_date" + - "sourceType" + properties: + shippo_token: + type: "string" + description: "The bearer token used for making requests" + title: "Shippo Token" + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "shippo" + const: "shippo" + enum: + - "shippo" + order: 0 + type: "string" + source-shippo-update: + type: "object" + required: + - "shippo_token" + - "start_date" + properties: + shippo_token: + type: "string" + description: "The bearer token used for making requests" + title: "Shippo Token" + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 source-coda: type: "object" required: @@ -41550,7 +49746,6 @@ components: - "client_id" - "client_secret" - "tenant_id" - - "application_id_uri" - "user_id" - "sourceType" properties: @@ -41572,15 +49767,9 @@ components: title: "Tenant Id" airbyte_secret: true x-speakeasy-param-sensitive: true - application_id_uri: - type: "string" - order: 3 - title: "Application Id URI" - airbyte_secret: true - x-speakeasy-param-sensitive: true user_id: type: "string" - order: 4 + order: 3 title: "User Id" airbyte_secret: true x-speakeasy-param-sensitive: true @@ -41597,7 +49786,6 @@ components: - "client_id" - "client_secret" - "tenant_id" - - "application_id_uri" - "user_id" properties: client_id: @@ -41615,16 +49803,62 @@ components: order: 2 title: "Tenant Id" airbyte_secret: true - application_id_uri: + user_id: type: "string" order: 3 - title: "Application Id URI" + title: "User Id" airbyte_secret: true - user_id: + source-less-annoying-crm: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: type: "string" - order: 4 - title: "User Id" + description: + "API key to use. Manage and create your API keys on the Programmer\ + \ API settings page at https://account.lessannoyingcrm.com/app/Settings/Api." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "less-annoying-crm" + const: "less-annoying-crm" + enum: + - "less-annoying-crm" + order: 0 + type: "string" + source-less-annoying-crm-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Manage and create your API keys on the Programmer\ + \ API settings page at https://account.lessannoyingcrm.com/app/Settings/Api." 
+ name: "api_key" + order: 0 + title: "API Key" airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-planhat: type: "object" required: @@ -41660,6 +49894,39 @@ components: order: 0 title: "API Token" airbyte_secret: true + source-encharge: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "encharge" + const: "encharge" + enum: + - "encharge" + order: 0 + type: "string" + source-encharge-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-shortio: title: "Shortio Spec" type: "object" @@ -41756,6 +50023,146 @@ components: title: "Rest API Key" airbyte_secret: true description: "Instatus REST API key" + source-flexmail: + type: "object" + required: + - "account_id" + - "personal_access_token" + - "sourceType" + properties: + account_id: + type: "string" + description: + "Your Flexmail account ID. You can find it in your Flexmail\ + \ account settings." + name: "account_id" + order: 0 + title: "Account ID" + personal_access_token: + type: "string" + description: + "A personal access token for API authentication. Manage your\ + \ tokens in Flexmail under Settings > API > Personal access tokens." + name: "personal_access_token" + order: 1 + title: "Personal Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "flexmail" + const: "flexmail" + enum: + - "flexmail" + order: 0 + type: "string" + source-flexmail-update: + type: "object" + required: + - "account_id" + - "personal_access_token" + properties: + account_id: + type: "string" + description: + "Your Flexmail account ID. You can find it in your Flexmail\ + \ account settings." + name: "account_id" + order: 0 + title: "Account ID" + personal_access_token: + type: "string" + description: + "A personal access token for API authentication. Manage your\ + \ tokens in Flexmail under Settings > API > Personal access tokens." 
+ name: "personal_access_token" + order: 1 + title: "Personal Access Token" + airbyte_secret: true + source-openfda: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "openfda" + const: "openfda" + enum: + - "openfda" + order: 0 + type: "string" + source-openfda-update: + type: "object" + required: [] + properties: {} + source-elasticemail: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + scope_type: + type: "string" + enum: + - "Personal" + - "Global" + order: 1 + title: "scope type" + from: + type: "string" + order: 2 + title: "From" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "elasticemail" + const: "elasticemail" + enum: + - "elasticemail" + order: 0 + type: "string" + source-elasticemail-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + scope_type: + type: "string" + enum: + - "Personal" + - "Global" + order: 1 + title: "scope type" + from: + type: "string" + order: 2 + title: "From" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 source-vwo: type: "object" required: @@ -42686,6 +51093,39 @@ components: \ data before this date will not be replicated." type: "string" format: "date-time" + source-paperform: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "paperform" + const: "paperform" + enum: + - "paperform" + order: 0 + type: "string" + source-paperform-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-microsoft-sharepoint: title: "Microsoft SharePoint Source Spec" description: @@ -44039,62 +52479,91 @@ components: order: 4 x-speakeasy-param-sensitive: true sourceType: - title: "sonar-cloud" - const: "sonar-cloud" + title: "sonar-cloud" + const: "sonar-cloud" + enum: + - "sonar-cloud" + order: 0 + type: "string" + source-sonar-cloud-update: + type: "object" + required: + - "component_keys" + - "organization" + - "user_token" + properties: + component_keys: + type: "array" + title: "Component Keys" + description: "Comma-separated list of component keys." + examples: + - "airbyte-ws-order" + - "airbyte-ws-checkout" + order: 0 + end_date: + type: "string" + title: "End date" + description: "To retrieve issues created before the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 1 + organization: + type: "string" + title: "Organization" + description: + "Organization key. See here." 
+ examples: + - "airbyte" + order: 2 + start_date: + type: "string" + title: "Start date" + description: "To retrieve issues created after the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 3 + user_token: + type: "string" + title: "User Token" + description: + "Your User Token. See here. The token is case sensitive." + airbyte_secret: true + order: 4 + source-tinyemail: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "tinyemail" + const: "tinyemail" enum: - - "sonar-cloud" + - "tinyemail" order: 0 type: "string" - source-sonar-cloud-update: + source-tinyemail-update: type: "object" required: - - "component_keys" - - "organization" - - "user_token" + - "api_key" properties: - component_keys: - type: "array" - title: "Component Keys" - description: "Comma-separated list of component keys." - examples: - - "airbyte-ws-order" - - "airbyte-ws-checkout" - order: 0 - end_date: - type: "string" - title: "End date" - description: "To retrieve issues created before the given date (inclusive)." - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - examples: - - "YYYY-MM-DD" - format: "date" - order: 1 - organization: - type: "string" - title: "Organization" - description: - "Organization key. See here." - examples: - - "airbyte" - order: 2 - start_date: - type: "string" - title: "Start date" - description: "To retrieve issues created after the given date (inclusive)." - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - examples: - - "YYYY-MM-DD" - format: "date" - order: 3 - user_token: + api_key: type: "string" - title: "User Token" - description: - "Your User Token. See here. The token is case sensitive." + order: 0 + title: "API Key" airbyte_secret: true - order: 4 source-lob: type: "object" required: @@ -44252,6 +52721,117 @@ components: description: "Start date" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" order: 2 + source-apptivo: + type: "object" + required: + - "api_key" + - "access_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your Apptivo account under Business\ + \ Settings -> API Access." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_key: + type: "string" + order: 1 + title: "Access Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "apptivo" + const: "apptivo" + enum: + - "apptivo" + order: 0 + type: "string" + source-apptivo-update: + type: "object" + required: + - "api_key" + - "access_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your Apptivo account under Business\ + \ Settings -> API Access." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + access_key: + type: "string" + order: 1 + title: "Access Key" + airbyte_secret: true + source-zonka-feedback: + type: "object" + required: + - "datacenter" + - "auth_token" + - "sourceType" + properties: + datacenter: + type: "string" + description: + "The identifier for the data center, such as 'us1' or 'e' for\ + \ EU." + enum: + - "us1" + - "e" + name: "dc_id" + order: 0 + title: "Data Center ID" + auth_token: + type: "string" + description: + "Auth token to use. Generate it by navigating to Company Settings\ + \ > Developers > API in your Zonka Feedback account." 
+ name: "auth_token" + order: 1 + title: "Auth Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zonka-feedback" + const: "zonka-feedback" + enum: + - "zonka-feedback" + order: 0 + type: "string" + source-zonka-feedback-update: + type: "object" + required: + - "datacenter" + - "auth_token" + properties: + datacenter: + type: "string" + description: + "The identifier for the data center, such as 'us1' or 'e' for\ + \ EU." + enum: + - "us1" + - "e" + name: "dc_id" + order: 0 + title: "Data Center ID" + auth_token: + type: "string" + description: + "Auth token to use. Generate it by navigating to Company Settings\ + \ > Developers > API in your Zonka Feedback account." + name: "auth_token" + order: 1 + title: "Auth Token" + airbyte_secret: true source-orb: type: "object" required: @@ -45142,6 +53722,199 @@ components: type: "string" order: 2 title: "workspace" + source-fillout: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Developer settings tab of your\ + \ Fillout account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "fillout" + const: "fillout" + enum: + - "fillout" + order: 0 + type: "string" + source-fillout-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Developer settings tab of your\ + \ Fillout account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-inflowinventory: + type: "object" + required: + - "api_key" + - "companyid" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + companyid: + type: "string" + order: 1 + title: "CompanyID" + sourceType: + title: "inflowinventory" + const: "inflowinventory" + enum: + - "inflowinventory" + order: 0 + type: "string" + source-inflowinventory-update: + type: "object" + required: + - "api_key" + - "companyid" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + companyid: + type: "string" + order: 1 + title: "CompanyID" + source-clockodo: + type: "object" + required: + - "api_key" + - "email_address" + - "external_application" + - "years" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the 'Personal data' section of\ + \ your Clockodo account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + email_address: + type: "string" + description: + "Your Clockodo account email address. Find it in your Clockodo\ + \ account settings." + name: "email_address" + order: 1 + title: "Email Address" + external_application: + type: "string" + description: + "Identification of the calling application, including the email\ + \ address of a technical contact person. Format: [name of application\ + \ or company];[email address]." 
+ name: "external_application" + order: 2 + title: "External Application Header" + default: "Airbyte" + years: + type: "array" + description: "2024, 2025" + title: "Years" + order: 3 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 + sourceType: + title: "clockodo" + const: "clockodo" + enum: + - "clockodo" + order: 0 + type: "string" + source-clockodo-update: + type: "object" + required: + - "api_key" + - "email_address" + - "external_application" + - "years" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the 'Personal data' section of\ + \ your Clockodo account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + email_address: + type: "string" + description: + "Your Clockodo account email address. Find it in your Clockodo\ + \ account settings." + name: "email_address" + order: 1 + title: "Email Address" + external_application: + type: "string" + description: + "Identification of the calling application, including the email\ + \ address of a technical contact person. Format: [name of application\ + \ or company];[email address]." + name: "external_application" + order: 2 + title: "External Application Header" + default: "Airbyte" + years: + type: "array" + description: "2024, 2025" + title: "Years" + order: 3 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 source-google-webfonts: type: "object" required: @@ -46285,6 +55058,103 @@ components: title: "API Key" airbyte_secret: true order: 0 + source-zoho-analytics-metadata-api: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "data_center" + - "org_id" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 2 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - "com.cn" + - "jp" + order: 3 + title: "Data Center" + default: "com" + org_id: + type: "number" + order: 4 + title: "Org Id" + sourceType: + title: "zoho-analytics-metadata-api" + const: "zoho-analytics-metadata-api" + enum: + - "zoho-analytics-metadata-api" + order: 0 + type: "string" + source-zoho-analytics-metadata-api-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "data_center" + - "org_id" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 2 + title: "OAuth Refresh Token" + airbyte_secret: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - "com.cn" + - "jp" + order: 3 + title: "Data Center" + default: "com" + org_id: + type: "number" + order: 4 + title: "Org Id" source-buzzsprout: type: "object" required: @@ -46340,6 +55210,43 @@ components: 
format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 2 + source-rocketlane: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the API section in Settings\ + \ of your Rocketlane account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "rocketlane" + const: "rocketlane" + enum: + - "rocketlane" + order: 0 + type: "string" + source-rocketlane-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the API section in Settings\ + \ of your Rocketlane account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-youtube-analytics: title: "YouTube Analytics Spec" type: "object" @@ -46415,6 +55322,35 @@ components: "A refresh token generated using the above client ID and\ \ secret" airbyte_secret: true + source-systeme: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "systeme" + const: "systeme" + enum: + - "systeme" + order: 0 + type: "string" + source-systeme-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true source-zendesk-talk: type: "object" title: "Source Zendesk Talk Spec" @@ -46654,6 +55590,76 @@ components: \ example.thinkific.com, your subdomain is \"example\"." order: 1 title: "subdomain" + source-papersign: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "papersign" + const: "papersign" + enum: + - "papersign" + order: 0 + type: "string" + source-papersign-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Generate it on your account page at https://paperform.co/account/developer." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-eventzilla: + type: "object" + required: + - "x-api-key" + - "sourceType" + properties: + x-api-key: + type: "string" + description: + "API key to use. Generate it by creating a new application\ + \ within your Eventzilla account settings under Settings > App Management." + name: "x-api-key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "eventzilla" + const: "eventzilla" + enum: + - "eventzilla" + order: 0 + type: "string" + source-eventzilla-update: + type: "object" + required: + - "x-api-key" + properties: + x-api-key: + type: "string" + description: + "API key to use. Generate it by creating a new application\ + \ within your Eventzilla account settings under Settings > App Management." 
+ name: "x-api-key" + order: 0 + title: "API Key" + airbyte_secret: true source-plausible: type: "object" required: @@ -46917,6 +55923,85 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-zoho-campaign: + type: "object" + required: + - "client_id_2" + - "client_secret_2" + - "client_refresh_token" + - "data_center" + - "sourceType" + properties: + client_id_2: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret_2: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - ".jp" + - ".com.cn" + order: 3 + title: "Data Center" + sourceType: + title: "zoho-campaign" + const: "zoho-campaign" + enum: + - "zoho-campaign" + order: 0 + type: "string" + source-zoho-campaign-update: + type: "object" + required: + - "client_id_2" + - "client_secret_2" + - "client_refresh_token" + - "data_center" + properties: + client_id_2: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret_2: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + data_center: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.au" + - ".jp" + - ".com.cn" + order: 3 + title: "Data Center" source-oura: type: "object" required: @@ -46976,6 +56061,46 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" description: "End datetime to sync until. Default is current UTC datetime." + source-cin7: + type: "object" + required: + - "accountid" + - "api_key" + - "sourceType" + properties: + accountid: + type: "string" + description: "The ID associated with your account." + order: 0 + title: "Account ID" + api_key: + type: "string" + description: "The API key associated with your account." + order: 1 + title: "API Key" + sourceType: + title: "cin7" + const: "cin7" + enum: + - "cin7" + order: 0 + type: "string" + source-cin7-update: + type: "object" + required: + - "accountid" + - "api_key" + properties: + accountid: + type: "string" + description: "The ID associated with your account." + order: 0 + title: "Account ID" + api_key: + type: "string" + description: "The API key associated with your account." + order: 1 + title: "API Key" source-looker: type: "object" required: @@ -47112,6 +56237,55 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-campayn: + type: "object" + required: + - "sub_domain" + - "api_key" + - "sourceType" + properties: + sub_domain: + type: "string" + name: "domain" + title: "Sub Domain" + order: 0 + api_key: + type: "string" + description: + "API key to use. Find it in your Campayn account settings.\ + \ Keep it secure as it grants access to your Campayn data." 
+ name: "api_key" + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "campayn" + const: "campayn" + enum: + - "campayn" + order: 0 + type: "string" + source-campayn-update: + type: "object" + required: + - "sub_domain" + - "api_key" + properties: + sub_domain: + type: "string" + name: "domain" + title: "Sub Domain" + order: 0 + api_key: + type: "string" + description: + "API key to use. Find it in your Campayn account settings.\ + \ Keep it secure as it grants access to your Campayn data." + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 1 source-google-tasks: type: "object" required: @@ -48529,6 +57703,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 7 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." + required: + - "delivery_type" host: title: "Host Address" description: "The server host address" @@ -48583,7 +57800,9 @@ components: description: "The Private key" multiline: true order: 4 + airbyte_secret: true type: "string" + x-speakeasy-param-sensitive: true required: - "private_key" - "auth_type" @@ -49091,6 +58310,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 7 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." 
+ required: + - "delivery_type" host: title: "Host Address" description: "The server host address" @@ -49144,6 +58406,7 @@ components: description: "The Private key" multiline: true order: 4 + airbyte_secret: true type: "string" required: - "private_key" @@ -50518,6 +59781,57 @@ components: default: false order: 4 type: "boolean" + source-oncehub: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your OnceHub account under the\ + \ API & Webhooks Integration page." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "oncehub" + const: "oncehub" + enum: + - "oncehub" + order: 0 + type: "string" + source-oncehub-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in your OnceHub account under the\ + \ API & Webhooks Integration page." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-aws-cloudtrail: title: "Aws CloudTrail Spec" type: "object" @@ -50739,6 +60053,18 @@ components: \ for more info." default: false order: 6 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: "The number of worker threads to use for the sync." + order: 7 sourceType: title: "jira" const: "jira" @@ -50831,6 +60157,18 @@ components: \ for more info." default: false order: 6 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: "The number of worker threads to use for the sync." + order: 7 source-smartwaiver: type: "object" required: @@ -50892,6 +60230,61 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 2 + source-bunny-inc: + type: "object" + required: + - "subdomain" + - "apikey" + - "sourceType" + properties: + apikey: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "The subdomain specific to your Bunny account or service." + name: "subdomain" + order: 0 + title: "Subdomain" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "bunny-inc" + const: "bunny-inc" + enum: + - "bunny-inc" + order: 0 + type: "string" + source-bunny-inc-update: + type: "object" + required: + - "subdomain" + - "apikey" + properties: + apikey: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "The subdomain specific to your Bunny account or service." 
+ name: "subdomain" + order: 0 + title: "Subdomain" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-hubspot: title: "HubSpot Source Spec" type: "object" @@ -51199,6 +60592,86 @@ components: \ pagination will begin with that number to end of available comics" default: "2960" order: 0 + source-jobnimbus: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it by logging into your JobNimbus account,\ + \ navigating to settings, and creating a new API key under the API section." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "jobnimbus" + const: "jobnimbus" + enum: + - "jobnimbus" + order: 0 + type: "string" + source-jobnimbus-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it by logging into your JobNimbus account,\ + \ navigating to settings, and creating a new API key under the API section." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-marketstack: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "marketstack" + const: "marketstack" + enum: + - "marketstack" + order: 0 + type: "string" + source-marketstack-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 source-zenloop: title: "Zenloop Spec" type: "object" @@ -51370,6 +60843,23 @@ components: - "2.0" default: "2.0" order: 3 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." + order: 4 sourceType: title: "chargebee" const: "chargebee" @@ -51424,6 +60914,23 @@ components: - "2.0" default: "2.0" order: 3 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 50 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Chargebee plan. More info\ + \ about the rate limit plan tiers can be found on Chargebee's API docs." 
+ order: 4 source-wrike: type: "object" required: @@ -52458,6 +61965,45 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-firehydrant: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Bot token to use for authenticating with the FireHydrant API.\ + \ You can find or create a bot token by logging into your organization\ + \ and visiting the Bot users page at https://app.firehydrant.io/organizations/bots." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "firehydrant" + const: "firehydrant" + enum: + - "firehydrant" + order: 0 + type: "string" + source-firehydrant-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Bot token to use for authenticating with the FireHydrant API.\ + \ You can find or create a bot token by logging into your organization\ + \ and visiting the Bot users page at https://app.firehydrant.io/organizations/bots." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-concord: type: "object" required: @@ -52509,6 +62055,64 @@ components: name: "organizationId" order: 1 title: "Environment" + source-e-conomic: + type: "object" + required: + - "app_secret_token" + - "agreement_grant_token" + - "sourceType" + properties: + app_secret_token: + type: "string" + description: + "Your private token that identifies your app. Find it in your\ + \ e-conomic account settings." + name: "app_secret_token" + order: 0 + title: "App Secret Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + agreement_grant_token: + type: "string" + description: + "Token that identifies the grant issued by an agreement, allowing\ + \ your app to access data. Obtain it from your e-conomic account settings." + name: "agreement_grant_token" + order: 1 + title: "Agreement Grant Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "e-conomic" + const: "e-conomic" + enum: + - "e-conomic" + order: 0 + type: "string" + source-e-conomic-update: + type: "object" + required: + - "app_secret_token" + - "agreement_grant_token" + properties: + app_secret_token: + type: "string" + description: + "Your private token that identifies your app. Find it in your\ + \ e-conomic account settings." + name: "app_secret_token" + order: 0 + title: "App Secret Token" + airbyte_secret: true + agreement_grant_token: + type: "string" + description: + "Token that identifies the grant issued by an agreement, allowing\ + \ your app to access data. Obtain it from your e-conomic account settings." + name: "agreement_grant_token" + order: 1 + title: "Agreement Grant Token" + airbyte_secret: true source-appfollow: type: "object" required: @@ -52815,6 +62419,55 @@ components: description: "API Secret" airbyte_secret: true order: 0 + source-gitbook: + type: "object" + required: + - "access_token" + - "space_id" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Personal access token for authenticating with the GitBook\ + \ API. You can view and manage your access tokens in the Developer settings\ + \ of your GitBook user account." 
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + space_id: + type: "string" + order: 1 + title: "Space Id" + sourceType: + title: "gitbook" + const: "gitbook" + enum: + - "gitbook" + order: 0 + type: "string" + source-gitbook-update: + type: "object" + required: + - "access_token" + - "space_id" + properties: + access_token: + type: "string" + description: + "Personal access token for authenticating with the GitBook\ + \ API. You can view and manage your access tokens in the Developer settings\ + \ of your GitBook user account." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + space_id: + type: "string" + order: 1 + title: "Space Id" source-miro: type: "object" required: @@ -53137,6 +62790,111 @@ components: \ issues fetching the stream, or checking the connection please set this\ \ to `False` instead." default: true + source-finnworlds: + type: "object" + required: + - "key" + - "start_date" + - "sourceType" + properties: + list: + type: "string" + description: "Choose isin, ticker, reg_lei or cik" + order: 0 + title: "List" + default: "ticker" + list_countries_for_bonds: + type: "string" + order: 1 + title: "List Countries for Bonds" + default: "country" + key: + type: "string" + order: 2 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + bond_type: + type: "array" + description: "For example 10y, 5y, 2y..." + order: 3 + title: "Bond Type" + countries: + type: "array" + description: "brazil, united states, italia, japan" + order: 4 + title: "Countries" + tickers: + type: "array" + description: "AAPL, T, MU, GOOG" + order: 5 + title: "Tickers" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + commodities: + type: "array" + description: "Options Available: beef, cheese, oil, ..." + order: 7 + title: "Commodities" + sourceType: + title: "finnworlds" + const: "finnworlds" + enum: + - "finnworlds" + order: 0 + type: "string" + source-finnworlds-update: + type: "object" + required: + - "key" + - "start_date" + properties: + list: + type: "string" + description: "Choose isin, ticker, reg_lei or cik" + order: 0 + title: "List" + default: "ticker" + list_countries_for_bonds: + type: "string" + order: 1 + title: "List Countries for Bonds" + default: "country" + key: + type: "string" + order: 2 + title: "API Key" + airbyte_secret: true + bond_type: + type: "array" + description: "For example 10y, 5y, 2y..." + order: 3 + title: "Bond Type" + countries: + type: "array" + description: "brazil, united states, italia, japan" + order: 4 + title: "Countries" + tickers: + type: "array" + description: "AAPL, T, MU, GOOG" + order: 5 + title: "Tickers" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + commodities: + type: "array" + description: "Options Available: beef, cheese, oil, ..." 
+ order: 7 + title: "Commodities" source-google-calendar: type: "object" required: @@ -53433,12 +63191,178 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-zoho-billing: + type: "object" + required: + - "region" + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + name: "region" + order: 0 + title: "Region" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zoho-billing" + const: "zoho-billing" + enum: + - "zoho-billing" + order: 0 + type: "string" + source-zoho-billing-update: + type: "object" + required: + - "region" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + name: "region" + order: 0 + title: "Region" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + source-akeneo: + type: "object" + required: + - "host" + - "api_username" + - "password" + - "client_id" + - "sourceType" + properties: + host: + type: "string" + description: "https://cb8715249e.trial.akeneo.cloud" + order: 0 + title: "Host" + api_username: + type: "string" + order: 1 + title: "API Username" + password: + type: "string" + order: 2 + title: "Password" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + order: 3 + title: "Client ID" + secret: + type: "string" + order: 4 + title: "Secret" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "akeneo" + const: "akeneo" + enum: + - "akeneo" + order: 0 + type: "string" + source-akeneo-update: + type: "object" + required: + - "host" + - "api_username" + - "password" + - "client_id" + properties: + host: + type: "string" + description: "https://cb8715249e.trial.akeneo.cloud" + order: 0 + title: "Host" + api_username: + type: "string" + order: 1 + title: "API Username" + password: + type: "string" + order: 2 + title: "Password" + airbyte_secret: true + client_id: + type: "string" + order: 3 + title: "Client ID" + secret: + type: "string" + order: 4 + title: "Secret" + always_show: true + airbyte_secret: true source-amazon-ads: - title: "Amazon Ads Spec" + title: "Source Amazon Ads" type: "object" properties: auth_type: title: "Auth Type" + default: "oauth2.0" const: "oauth2.0" order: 0 type: "string" @@ -53450,9 +63374,9 @@ components: "The client ID of your Amazon Ads developer application. See\ \ the docs for more information." 
+ airbyte_secret: true order: 1 type: "string" - airbyte_secret: true x-speakeasy-param-sensitive: true client_secret: title: "Client Secret" @@ -53478,25 +63402,176 @@ components: description: "Region to pull data from (EU/NA/FE). See docs for more details." + default: "NA" enum: - "NA" - "EU" - "FE" - type: "string" - default: "NA" order: 4 + type: "string" start_date: title: "Start Date" description: "The Start date for collecting reports, should not be more\ \ than 60 days in the past. In YYYY-MM-DD format" + examples: + - "2022-10-10" + - "2022-10-22" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 5 + type: "string" format: "date" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + default: [] + order: 8 + type: "array" + items: + title: "StateFilterEnum" + description: "An enumeration." + enum: + - "enabled" + - "paused" + - "archived" + type: "string" + uniqueItems: true + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + default: 3 + examples: + - 3 + - 10 + order: 9 + type: "integer" + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + default: [] + order: 10 + type: "array" + items: + title: "ReportRecordTypeEnum" + description: "An enumeration." + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "string" + uniqueItems: true + sourceType: + title: "amazon-ads" + const: "amazon-ads" + enum: + - "amazon-ads" + order: 0 + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + source-amazon-ads-update: + title: "Source Amazon Ads" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2.0" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + airbyte_secret: true + order: 1 + type: "string" + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. 
See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." + default: "NA" + enum: + - "NA" + - "EU" + - "FE" + order: 4 + type: "string" + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" examples: - "2022-10-10" - "2022-10-22" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" order: 5 type: "string" + format: "date" profiles: title: "Profile IDs" description: @@ -53526,170 +63601,29 @@ components: "Reflects the state of the Display, Product, and Brand Campaign\ \ streams as enabled, paused, or archived. If you do not populate this\ \ field, it will be ignored completely." - items: - type: "string" - enum: - - "enabled" - - "paused" - - "archived" - type: "array" - uniqueItems: true + default: [] order: 8 - look_back_window: - title: "Look Back Window" - description: - "The amount of days to go back in time to get the updated data\ - \ from Amazon Ads" - examples: - - 3 - - 10 - type: "integer" - default: 3 - order: 9 - report_record_types: - title: "Report Record Types" - description: - "Optional configuration which accepts an array of string of\ - \ record types. Leave blank for default behaviour to pull all report types.\ - \ Use this config option only if you want to pull specific report type(s).\ - \ See docs for more details" - items: - type: "string" - enum: - - "adGroups" - - "asins" - - "asins_keywords" - - "asins_targets" - - "campaigns" - - "keywords" - - "productAds" - - "targets" - type: "array" - uniqueItems: true - order: 10 - sourceType: - title: "amazon-ads" - const: "amazon-ads" - enum: - - "amazon-ads" - order: 0 - type: "string" - required: - - "client_id" - - "client_secret" - - "refresh_token" - - "sourceType" - source-amazon-ads-update: - title: "Amazon Ads Spec" - type: "object" - properties: - auth_type: - title: "Auth Type" - const: "oauth2.0" - order: 0 - type: "string" - enum: - - "oauth2.0" - client_id: - title: "Client ID" - description: - "The client ID of your Amazon Ads developer application. See\ - \ the docs for more information." - order: 1 - type: "string" - airbyte_secret: true - client_secret: - title: "Client Secret" - description: - "The client secret of your Amazon Ads developer application.\ - \ See the docs for more information." - airbyte_secret: true - order: 2 - type: "string" - refresh_token: - title: "Refresh Token" - description: - "Amazon Ads refresh token. See the docs for more information on how to obtain this token." - airbyte_secret: true - order: 3 - type: "string" - region: - title: "Region" - description: - "Region to pull data from (EU/NA/FE). See docs for more details." - enum: - - "NA" - - "EU" - - "FE" - type: "string" - default: "NA" - order: 4 - start_date: - title: "Start Date" - description: - "The Start date for collecting reports, should not be more\ - \ than 60 days in the past. In YYYY-MM-DD format" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - format: "date" - examples: - - "2022-10-10" - - "2022-10-22" - order: 5 - type: "string" - profiles: - title: "Profile IDs" - description: - "Profile IDs you want to fetch data for. The Amazon Ads source\ - \ connector supports only profiles with seller and vendor type, profiles\ - \ with agency type will be ignored. See docs for more details. 
Note: If Marketplace IDs are also selected,\ - \ profiles will be selected if they match the Profile ID OR the Marketplace\ - \ ID." - order: 6 type: "array" items: - type: "integer" - marketplace_ids: - title: "Marketplace IDs" - description: - "Marketplace IDs you want to fetch data for. Note: If Profile\ - \ IDs are also selected, profiles will be selected if they match the Profile\ - \ ID OR the Marketplace ID." - order: 7 - type: "array" - items: - type: "string" - state_filter: - title: "State Filter" - description: - "Reflects the state of the Display, Product, and Brand Campaign\ - \ streams as enabled, paused, or archived. If you do not populate this\ - \ field, it will be ignored completely." - items: - type: "string" + title: "StateFilterEnum" + description: "An enumeration." enum: - "enabled" - "paused" - "archived" - type: "array" + type: "string" uniqueItems: true - order: 8 look_back_window: title: "Look Back Window" description: "The amount of days to go back in time to get the updated data\ \ from Amazon Ads" + default: 3 examples: - 3 - 10 - type: "integer" - default: 3 order: 9 + type: "integer" report_record_types: title: "Report Record Types" description: @@ -53698,8 +63632,12 @@ components: \ Use this config option only if you want to pull specific report type(s).\ \ See docs for more details" + default: [] + order: 10 + type: "array" items: - type: "string" + title: "ReportRecordTypeEnum" + description: "An enumeration." enum: - "adGroups" - "asins" @@ -53709,9 +63647,8 @@ components: - "keywords" - "productAds" - "targets" - type: "array" + type: "string" uniqueItems: true - order: 10 required: - "client_id" - "client_secret" @@ -54234,6 +64171,133 @@ components: format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 3 + source-newsdata-io: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + search_query: + type: "string" + description: + "Search news articles for specific keywords or phrases present\ + \ in the news title, content, URL, meta keywords and meta description." + title: "Search Query" + order: 1 + countries: + type: "array" + description: + "Search the news articles from a specific country. You can\ + \ add up to 5 countries in a single query. Example: au, jp, br" + title: "Countries" + order: 2 + categories: + type: "array" + description: + "Search the news articles for a specific category. You can\ + \ add up to 5 categories in a single query." + title: "Categories" + order: 3 + languages: + type: "array" + description: + "Search the news articles for a specific language. You can\ + \ add up to 5 languages in a single query. " + title: "Languages" + order: 4 + domains: + type: "array" + description: + "Search the news articles for specific domains or news sources.\ + \ You can add up to 5 domains in a single query. " + title: "Domains" + order: 5 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + end_date: + type: "string" + description: "Choose an end date. 
Now UTC is default value" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 7 + sourceType: + title: "newsdata-io" + const: "newsdata-io" + enum: + - "newsdata-io" + order: 0 + type: "string" + source-newsdata-io-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + search_query: + type: "string" + description: + "Search news articles for specific keywords or phrases present\ + \ in the news title, content, URL, meta keywords and meta description." + title: "Search Query" + order: 1 + countries: + type: "array" + description: + "Search the news articles from a specific country. You can\ + \ add up to 5 countries in a single query. Example: au, jp, br" + title: "Countries" + order: 2 + categories: + type: "array" + description: + "Search the news articles for a specific category. You can\ + \ add up to 5 categories in a single query." + title: "Categories" + order: 3 + languages: + type: "array" + description: + "Search the news articles for a specific language. You can\ + \ add up to 5 languages in a single query. " + title: "Languages" + order: 4 + domains: + type: "array" + description: + "Search the news articles for specific domains or news sources.\ + \ You can add up to 5 domains in a single query. " + title: "Domains" + order: 5 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + end_date: + type: "string" + description: "Choose an end date. Now UTC is default value" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 7 source-clazar: type: "object" required: @@ -56403,6 +66467,45 @@ components: >here." airbyte_secret: true order: 0 + source-repairshopr: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + title: "subdomain" + order: 1 + sourceType: + title: "repairshopr" + const: "repairshopr" + enum: + - "repairshopr" + order: 0 + type: "string" + source-repairshopr-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + title: "subdomain" + order: 1 source-hubplanner: title: "Hubplanner Spec" type: "object" @@ -57914,6 +68017,53 @@ components: type: "string" airbyte_secret: true order: 4 + source-lightspeed-retail: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key or access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "The subdomain for the retailer, e.g., 'example' in 'example.retail.lightspeed.app'." 
+ name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "lightspeed-retail" + const: "lightspeed-retail" + enum: + - "lightspeed-retail" + order: 0 + type: "string" + source-lightspeed-retail-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: "API key or access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "The subdomain for the retailer, e.g., 'example' in 'example.retail.lightspeed.app'." + name: "subdomain" + order: 1 + title: "Subdomain" source-postmarkapp: type: "object" required: @@ -57960,6 +68110,107 @@ components: order: 1 title: "X-Postmark-Account-Token" airbyte_secret: true + source-finnhub: + type: "object" + required: + - "api_key" + - "symbols" + - "market_news_category" + - "exchange" + - "start_date_2" + - "sourceType" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + symbols: + type: "array" + name: "company_symbol" + order: 1 + title: "Companies" + market_news_category: + type: "string" + description: + "This parameter can be 1 of the following values general, forex,\ + \ crypto, merger." + title: "Market News Category" + default: "general" + enum: + - "general" + - "forex" + - "crypto" + - "merger" + order: 2 + exchange: + type: "string" + description: "More info: https://finnhub.io/docs/api/stock-symbols" + title: "Exchange" + default: "US" + order: 3 + start_date_2: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 + sourceType: + title: "finnhub" + const: "finnhub" + enum: + - "finnhub" + order: 0 + type: "string" + source-finnhub-update: + type: "object" + required: + - "api_key" + - "symbols" + - "market_news_category" + - "exchange" + - "start_date_2" + properties: + api_key: + type: "string" + description: "The API key to use for authentication" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + symbols: + type: "array" + name: "company_symbol" + order: 1 + title: "Companies" + market_news_category: + type: "string" + description: + "This parameter can be 1 of the following values general, forex,\ + \ crypto, merger." + title: "Market News Category" + default: "general" + enum: + - "general" + - "forex" + - "crypto" + - "merger" + order: 2 + exchange: + type: "string" + description: "More info: https://finnhub.io/docs/api/stock-symbols" + title: "Exchange" + default: "US" + order: 3 + start_date_2: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 4 source-float: type: "object" required: @@ -58070,6 +68321,66 @@ components: minimum: 1 default: 1000 order: 0 + source-onfleet: + type: "object" + required: + - "api_key" + - "password" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use for authenticating requests. You can create\ + \ and manage your API keys in the API section of the Onfleet dashboard." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + password: + type: "string" + description: + "Placeholder for basic HTTP auth password - should be set to\ + \ empty string" + name: "password" + order: 1 + title: "Placeholder Password" + default: "x" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "onfleet" + const: "onfleet" + enum: + - "onfleet" + order: 0 + type: "string" + source-onfleet-update: + type: "object" + required: + - "api_key" + - "password" + properties: + api_key: + type: "string" + description: + "API key to use for authenticating requests. You can create\ + \ and manage your API keys in the API section of the Onfleet dashboard." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + password: + type: "string" + description: + "Placeholder for basic HTTP auth password - should be set to\ + \ empty string" + name: "password" + order: 1 + title: "Placeholder Password" + default: "x" + airbyte_secret: true source-gorgias: type: "object" required: @@ -58610,7 +68921,118 @@ components: type: "string" airbyte_secret: true order: 1 - x-speakeasy-param-sensitive: true + x-speakeasy-param-sensitive: true + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." 
+ type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + sourceType: + title: "sftp" + const: "sftp" + enum: + - "sftp" + order: 0 + type: "string" + source-sftp-update: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: "Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 - title: "SSH Key Authentication" required: - "auth_method" @@ -58632,7 +69054,6 @@ components: airbyte_secret: true multiline: true order: 1 - x-speakeasy-param-sensitive: true file_types: title: "File types" description: @@ -58662,116 +69083,75 @@ components: examples: - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" order: 6 + source-agilecrm: + type: "object" + required: + - "email" + - "domain" + - "api_key" + - "sourceType" + properties: + email: + type: "string" + description: + "Your Agile CRM account email address. This is used as the\ + \ username for authentication." + name: "email" + order: 0 + title: "Email Address" + domain: + type: "string" + description: "The specific subdomain for your Agile CRM account" + name: "domain" + order: 1 + title: "Domain" + api_key: + type: "string" + description: + "API key to use. Find it at Admin Settings -> API & Analytics\ + \ -> API Key in your Agile CRM account." + name: "api_key" + order: 2 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true sourceType: - title: "sftp" - const: "sftp" + title: "agilecrm" + const: "agilecrm" enum: - - "sftp" + - "agilecrm" order: 0 type: "string" - source-sftp-update: - title: "SFTP Source Spec" + source-agilecrm-update: type: "object" required: - - "user" - - "host" - - "port" + - "email" + - "domain" + - "api_key" properties: - user: - title: "User Name" - description: "The server user" + email: type: "string" + description: + "Your Agile CRM account email address. This is used as the\ + \ username for authentication." 
+ name: "email" order: 0 - host: - title: "Host Address" - description: "The server host address" + title: "Email Address" + domain: type: "string" - examples: - - "www.host.com" - - "192.0.2.1" + description: "The specific subdomain for your Agile CRM account" + name: "domain" order: 1 - port: - title: "Port" - description: "The server port" - type: "integer" - default: 22 - examples: - - "22" - order: 2 - credentials: - type: "object" - title: "Authentication" - description: "The server authentication method" - order: 3 - oneOf: - - title: "Password Authentication" - required: - - "auth_method" - - "auth_user_password" - properties: - auth_method: - description: "Connect through password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - enum: - - "SSH_PASSWORD_AUTH" - auth_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 1 - - title: "SSH Key Authentication" - required: - - "auth_method" - - "auth_ssh_key" - properties: - auth_method: - description: "Connect through ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - enum: - - "SSH_KEY_AUTH" - auth_ssh_key: - title: "SSH Private Key" - description: - "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 1 - file_types: - title: "File types" - description: - "Coma separated file types. Currently only 'csv' and 'json'\ - \ types are supported." - type: "string" - default: "csv,json" - order: 4 - examples: - - "csv,json" - - "csv" - folder_path: - title: "Folder Path" - description: "The directory to search files for sync" + title: "Domain" + api_key: type: "string" - default: "" - examples: - - "/logs/2022" - order: 5 - file_pattern: - title: "File Pattern" description: - "The regular expression to specify files for sync in a chosen\ - \ Folder Path" - type: "string" - default: "" - examples: - - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" - order: 6 + "API key to use. Find it at Admin Settings -> API & Analytics\ + \ -> API Key in your Agile CRM account." + name: "api_key" + order: 2 + title: "API Key" + airbyte_secret: true source-google-drive: title: "Google Drive Source Spec" description: @@ -59791,6 +70171,45 @@ components: >here." 
airbyte_secret: true order: 2 + source-pabbly-subscriptions-billing: + type: "object" + required: + - "username" + - "sourceType" + properties: + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + username: + type: "string" + order: 0 + title: "Username" + sourceType: + title: "pabbly-subscriptions-billing" + const: "pabbly-subscriptions-billing" + enum: + - "pabbly-subscriptions-billing" + order: 0 + type: "string" + source-pabbly-subscriptions-billing-update: + type: "object" + required: + - "username" + properties: + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + username: + type: "string" + order: 0 + title: "Username" source-chameleon: type: "object" required: @@ -61147,8 +71566,61 @@ components: title: "Refresh token" airbyte_secret: true order: 4 + source-taboola: + type: "object" + required: + - "client_id" + - "client_secret" + - "account_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + description: "The ID associated with your taboola account" + order: 2 + title: "Account ID" + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "taboola" + const: "taboola" + enum: + - "taboola" + order: 0 + type: "string" + source-taboola-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "account_id" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + account_id: + type: "string" + description: "The ID associated with your taboola account" + order: 2 + title: "Account ID" + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true source-qualaroo: - title: "Qualaroo Spec" type: "object" required: - "token" @@ -61158,39 +71630,43 @@ components: properties: token: type: "string" - title: "API token" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API token" airbyte_secret: true + order: 0 x-speakeasy-param-sensitive: true key: type: "string" - title: "API key" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API key" airbyte_secret: true + order: 1 x-speakeasy-param-sensitive: true start_date: type: "string" - title: "Start Date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ \ data before this date will not be replicated." + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" examples: - "2021-03-01T00:00:00.000Z" + order: 2 survey_ids: type: "array" - items: - type: "string" - pattern: "^[0-9]{1,8}$" - title: "Qualaroo survey IDs" description: "IDs of the surveys from which you'd like to replicate data.\ \ If left empty, data from all surveys to which you have access will be\ \ replicated." 
+ items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + order: 3 sourceType: title: "qualaroo" const: "qualaroo" @@ -61199,7 +71675,6 @@ components: order: 0 type: "string" source-qualaroo-update: - title: "Qualaroo Spec" type: "object" required: - "token" @@ -61208,37 +71683,41 @@ components: properties: token: type: "string" - title: "API token" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API token" airbyte_secret: true + order: 0 key: type: "string" - title: "API key" description: "A Qualaroo token. See the docs for instructions on how to generate it." + title: "API key" airbyte_secret: true + order: 1 start_date: type: "string" - title: "Start Date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ \ data before this date will not be replicated." + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" examples: - "2021-03-01T00:00:00.000Z" + order: 2 survey_ids: type: "array" - items: - type: "string" - pattern: "^[0-9]{1,8}$" - title: "Qualaroo survey IDs" description: "IDs of the surveys from which you'd like to replicate data.\ \ If left empty, data from all surveys to which you have access will be\ \ replicated." + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + order: 3 source-front: type: "object" required: @@ -61641,6 +72120,283 @@ components: additionalProperties: true order: 3 title: "Authentication mechanism" + source-sendowl: + type: "object" + required: + - "username" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + description: "Enter you API Key" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API secret" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "sendowl" + const: "sendowl" + enum: + - "sendowl" + order: 0 + type: "string" + source-sendowl-update: + type: "object" + required: + - "username" + - "start_date" + properties: + username: + type: "string" + description: "Enter you API Key" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API secret" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-getgist: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Integration Settings on your\ + \ Gist dashboard at https://app.getgist.com/projects/_/settings/api-key." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "getgist" + const: "getgist" + enum: + - "getgist" + order: 0 + type: "string" + source-getgist-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. Find it in the Integration Settings on your\ + \ Gist dashboard at https://app.getgist.com/projects/_/settings/api-key." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-mailtrap: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://mailtrap.io/account" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "mailtrap" + const: "mailtrap" + enum: + - "mailtrap" + order: 0 + type: "string" + source-mailtrap-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://mailtrap.io/account" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-cloudbeds: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "cloudbeds" + const: "cloudbeds" + enum: + - "cloudbeds" + order: 0 + type: "string" + source-cloudbeds-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + source-freshbooks: + type: "object" + required: + - "client_id" + - "client_secret" + - "redirect_uri" + - "account_id" + - "client_refresh_token" + - "business_uuid" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + redirect_uri: + type: "string" + order: 2 + title: "Redirect Uri" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + order: 3 + title: "Account Id" + client_refresh_token: + type: "string" + order: 4 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + oauth_access_token: + type: "string" + description: + "The current access token. This field might be overridden by\ + \ the connector based on the token refresh endpoint response." + order: 5 + title: "Access token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + oauth_token_expiry_date: + type: "string" + description: + "The date the current access token expires in. This field might\ + \ be overridden by the connector based on the token refresh endpoint response." + order: 6 + title: "Token expiry date" + format: "date-time" + business_uuid: + type: "string" + order: 7 + title: "Business uuid" + sourceType: + title: "freshbooks" + const: "freshbooks" + enum: + - "freshbooks" + order: 0 + type: "string" + source-freshbooks-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "redirect_uri" + - "account_id" + - "client_refresh_token" + - "business_uuid" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + redirect_uri: + type: "string" + order: 2 + title: "Redirect Uri" + airbyte_secret: true + account_id: + type: "string" + order: 3 + title: "Account Id" + client_refresh_token: + type: "string" + order: 4 + title: "Refresh token" + airbyte_secret: true + oauth_access_token: + type: "string" + description: + "The current access token. This field might be overridden by\ + \ the connector based on the token refresh endpoint response." 
+ order: 5 + title: "Access token" + airbyte_secret: true + oauth_token_expiry_date: + type: "string" + description: + "The date the current access token expires in. This field might\ + \ be overridden by the connector based on the token refresh endpoint response." + order: 6 + title: "Token expiry date" + format: "date-time" + business_uuid: + type: "string" + order: 7 + title: "Business uuid" source-just-sift: type: "object" required: @@ -62218,31 +72974,31 @@ components: properties: api_key: type: "string" - title: "API Key" - airbyte_secret: true description: "Recurly API Key. See the docs for more information on how to generate this key." order: 0 + title: "API Key" + airbyte_secret: true x-speakeasy-param-sensitive: true begin_time: type: "string" description: "ISO8601 timestamp from which the replication from Recurly\ \ API will start from." + order: 1 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 1 end_time: type: "string" description: "ISO8601 timestamp to which the replication from Recurly API\ \ will stop. Records after that date won't be imported." + order: 2 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 2 sourceType: title: "recurly" const: "recurly" @@ -62257,30 +73013,30 @@ components: properties: api_key: type: "string" - title: "API Key" - airbyte_secret: true description: "Recurly API Key. See the docs for more information on how to generate this key." order: 0 + title: "API Key" + airbyte_secret: true begin_time: type: "string" description: "ISO8601 timestamp from which the replication from Recurly\ \ API will start from." + order: 1 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 1 end_time: type: "string" description: "ISO8601 timestamp to which the replication from Recurly API\ \ will stop. Records after that date won't be imported." + order: 2 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" examples: - "2021-12-01T00:00:00" - pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" - order: 2 source-pennylane: type: "object" required: @@ -62448,6 +73204,7 @@ components: title: "Zendesk Chat Spec" type: "object" required: + - "subdomain" - "start_date" - "sourceType" properties: @@ -62465,8 +73222,12 @@ components: type: "string" title: "Subdomain" description: - "Required if you access Zendesk Chat from a Zendesk Support\ - \ subdomain." + "The unique subdomain of your Zendesk account (without https://).\ + \ See the Zendesk docs to find your subdomain" + pattern: "^(?!https://)" + examples: + - "myzendeskchat" default: "" credentials: title: "Authorization Method" @@ -62538,6 +73299,7 @@ components: title: "Zendesk Chat Spec" type: "object" required: + - "subdomain" - "start_date" properties: start_date: @@ -62554,8 +73316,12 @@ components: type: "string" title: "Subdomain" description: - "Required if you access Zendesk Chat from a Zendesk Support\ - \ subdomain." 
+ "The unique subdomain of your Zendesk account (without https://).\ + \ See the Zendesk docs to find your subdomain" + pattern: "^(?!https://)" + examples: + - "myzendeskchat" default: "" credentials: title: "Authorization Method" @@ -63053,6 +73819,147 @@ components: order: 2 title: "API Endpoint Prefix" default: "api" + source-nocrm: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the admin section of your\ + \ noCRM.io account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: + "The subdomain specific to your noCRM.io account, e.g., 'yourcompany'\ + \ in 'yourcompany.nocrm.io'." + name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "nocrm" + const: "nocrm" + enum: + - "nocrm" + order: 0 + type: "string" + source-nocrm-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: + "API key to use. Generate it from the admin section of your\ + \ noCRM.io account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: + "The subdomain specific to your noCRM.io account, e.g., 'yourcompany'\ + \ in 'yourcompany.nocrm.io'." + name: "subdomain" + order: 1 + title: "Subdomain" + source-openaq: + type: "object" + required: + - "api_key" + - "country_ids" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + country_ids: + type: "array" + description: + "The list of IDs of countries (comma separated) you need the\ + \ data for, check more: https://docs.openaq.org/resources/countries" + order: 1 + title: "Countries" + sourceType: + title: "openaq" + const: "openaq" + enum: + - "openaq" + order: 0 + type: "string" + source-openaq-update: + type: "object" + required: + - "api_key" + - "country_ids" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + country_ids: + type: "array" + description: + "The list of IDs of countries (comma separated) you need the\ + \ data for, check more: https://docs.openaq.org/resources/countries" + order: 1 + title: "Countries" + source-deputy: + type: "object" + required: + - "base_url" + - "api_key" + - "sourceType" + properties: + base_url: + type: "string" + description: "The base url for your deputy account to make API requests" + order: 0 + title: "Base URL" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "deputy" + const: "deputy" + enum: + - "deputy" + order: 0 + type: "string" + source-deputy-update: + type: "object" + required: + - "base_url" + - "api_key" + properties: + base_url: + type: "string" + description: "The base url for your deputy account to make API requests" + order: 0 + title: "Base URL" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true source-workflowmax: type: "object" required: @@ -63178,6 +74085,81 @@ components: always_show: true airbyte_secret: true order: 3 + source-stockdata: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + symbols: + type: "array" + order: 
1 + title: "Symbols" + industries: + type: "array" + description: + "Specify the industries of entities which have been identified\ + \ within the article." + order: 2 + title: "Industries" + filter_entities: + type: "boolean" + order: 3 + title: "Entities" + default: false + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "stockdata" + const: "stockdata" + enum: + - "stockdata" + order: 0 + type: "string" + source-stockdata-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + symbols: + type: "array" + order: 1 + title: "Symbols" + industries: + type: "array" + description: + "Specify the industries of entities which have been identified\ + \ within the article." + order: 2 + title: "Industries" + filter_entities: + type: "boolean" + order: 3 + title: "Entities" + default: false + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-dockerhub: type: "object" required: @@ -66007,6 +76989,43 @@ components: order: 0 title: "Bearer Token" airbyte_secret: true + source-ubidots: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use for authentication. Obtain it from your Ubidots\ + \ account." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "ubidots" + const: "ubidots" + enum: + - "ubidots" + order: 0 + type: "string" + source-ubidots-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use for authentication. Obtain it from your Ubidots\ + \ account." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-height: type: "object" required: @@ -66144,6 +77163,43 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-ruddr: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it in the API Keys section of your\ + \ Ruddr workspace settings." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "ruddr" + const: "ruddr" + enum: + - "ruddr" + order: 0 + type: "string" + source-ruddr-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it in the API Keys section of your\ + \ Ruddr workspace settings." 
+ name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-polygon-stock-api: type: "object" required: @@ -66811,13 +77867,14 @@ components: - id: "advanced" title: "Advanced" source-retently: - title: "Retently Api Spec" type: "object" + required: + - "sourceType" properties: credentials: - title: "Authentication Mechanism" - description: "Choose how to authenticate to Retently" type: "object" + description: "Choose how to authenticate to Retently" + title: "Authentication Mechanism" oneOf: - type: "object" title: "Authenticate via Retently (OAuth)" @@ -66834,21 +77891,21 @@ components: enum: - "Client" client_id: - title: "Client ID" type: "string" description: "The Client ID of your Retently developer application." + title: "Client ID" client_secret: - title: "Client Secret" type: "string" description: "The Client Secret of your Retently developer application." + title: "Client Secret" airbyte_secret: true x-speakeasy-param-sensitive: true refresh_token: - title: "Refresh Token" type: "string" description: "Retently Refresh Token which can be used to fetch new\ \ Bearer Tokens when the current one expires." + title: "Refresh Token" airbyte_secret: true x-speakeasy-param-sensitive: true - type: "object" @@ -66864,13 +77921,14 @@ components: enum: - "Token" api_key: - title: "API Token" + type: "string" description: "Retently API Token. See the docs for more information on how to obtain this key." - type: "string" + title: "API Token" airbyte_secret: true x-speakeasy-param-sensitive: true + order: 0 sourceType: title: "retently" const: "retently" @@ -66879,13 +77937,13 @@ components: order: 0 type: "string" source-retently-update: - title: "Retently Api Spec" type: "object" + required: [] properties: credentials: - title: "Authentication Mechanism" - description: "Choose how to authenticate to Retently" type: "object" + description: "Choose how to authenticate to Retently" + title: "Authentication Mechanism" oneOf: - type: "object" title: "Authenticate via Retently (OAuth)" @@ -66902,20 +77960,20 @@ components: enum: - "Client" client_id: - title: "Client ID" type: "string" description: "The Client ID of your Retently developer application." + title: "Client ID" client_secret: - title: "Client Secret" type: "string" description: "The Client Secret of your Retently developer application." + title: "Client Secret" airbyte_secret: true refresh_token: - title: "Refresh Token" type: "string" description: "Retently Refresh Token which can be used to fetch new\ \ Bearer Tokens when the current one expires." + title: "Refresh Token" airbyte_secret: true - type: "object" title: "Authenticate with API Token" @@ -66930,12 +77988,13 @@ components: enum: - "Token" api_key: - title: "API Token" + type: "string" description: "Retently API Token. See the docs for more information on how to obtain this key." 
- type: "string" + title: "API Token" airbyte_secret: true + order: 0 source-jotform: type: "object" required: @@ -67150,6 +78209,45 @@ components: type: "string" required: - "access_token" + source-nutshell: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "API Token" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "nutshell" + const: "nutshell" + enum: + - "nutshell" + order: 0 + type: "string" + source-nutshell-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "API Token" + always_show: true + airbyte_secret: true source-dbt: type: "object" required: @@ -67675,6 +78773,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 6 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." + required: + - "delivery_type" bucket: title: "Bucket" description: "Name of the S3 bucket where the file(s) exist." @@ -68149,6 +79290,49 @@ components: required: - "name" - "format" + delivery_method: + title: "Delivery Method" + default: "use_records_transfer" + type: "object" + order: 6 + display_type: "radio" + group: "advanced" + oneOf: + - title: "Replicate Records" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_records_transfer" + const: "use_records_transfer" + enum: + - "use_records_transfer" + type: "string" + description: + "Recommended - Extract and load structured records into your\ + \ destination of choice. This is the classic method of moving data in\ + \ Airbyte. It allows for blocking and hashing individual fields or files\ + \ from a structured schema. Data can be flattened, typed and deduped\ + \ depending on the destination." + required: + - "delivery_type" + - title: "Copy Raw Files" + type: "object" + properties: + delivery_type: + title: "Delivery Type" + default: "use_file_transfer" + const: "use_file_transfer" + enum: + - "use_file_transfer" + type: "string" + description: + "Copy raw files without parsing their contents. Bits are\ + \ copied into the destination exactly as they appeared in the source.\ + \ Recommended for use with unstructured text data, non-text and compressed\ + \ files." 
+ required: + - "delivery_type" bucket: title: "Bucket" description: "Name of the S3 bucket where the file(s) exist." @@ -68324,32 +79508,88 @@ components: examples: - "2020-10-15T00:00:00Z" order: 3 + source-box: + type: "object" + required: + - "client_id" + - "client_secret" + - "user" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + user: + type: "number" + order: 2 + title: "User" + sourceType: + title: "box" + const: "box" + enum: + - "box" + order: 0 + type: "string" + source-box-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "user" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + user: + type: "number" + order: 2 + title: "User" source-zendesk-sunshine: type: "object" required: - - "start_date" - "subdomain" + - "start_date" - "sourceType" properties: subdomain: type: "string" + description: "The subdomain for your Zendesk Account." order: 0 title: "Subdomain" - description: "The subdomain for your Zendesk Account." start_date: type: "string" - title: "Start date" - format: "date-time" description: "The date from which you'd like to replicate data for Zendesk\ \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + title: "Start date" + format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-01-01T00:00:00Z" order: 1 credentials: - title: "Authorization Method" type: "object" + title: "Authorization Method" oneOf: - type: "object" title: "OAuth2.0" @@ -68368,20 +79608,20 @@ components: order: 0 client_id: type: "string" - title: "Client ID" description: "The Client ID of your OAuth application." + title: "Client ID" airbyte_secret: true x-speakeasy-param-sensitive: true client_secret: type: "string" - title: "Client Secret" description: "The Client Secret of your OAuth application." + title: "Client Secret" airbyte_secret: true x-speakeasy-param-sensitive: true access_token: type: "string" - title: "Access Token" description: "Long-term access Token for making authenticated requests." + title: "Access Token" airbyte_secret: true x-speakeasy-param-sensitive: true - type: "object" @@ -68400,16 +79640,17 @@ components: order: 1 api_token: type: "string" - title: "API Token" description: "API Token. See the docs for information on how to generate this key." + title: "API Token" airbyte_secret: true x-speakeasy-param-sensitive: true email: type: "string" - title: "Email" description: "The user email for your Zendesk account" + title: "Email" + order: 2 sourceType: title: "zendesk-sunshine" const: "zendesk-sunshine" @@ -68420,28 +79661,28 @@ components: source-zendesk-sunshine-update: type: "object" required: - - "start_date" - "subdomain" + - "start_date" properties: subdomain: type: "string" + description: "The subdomain for your Zendesk Account." order: 0 title: "Subdomain" - description: "The subdomain for your Zendesk Account." start_date: type: "string" - title: "Start date" - format: "date-time" description: "The date from which you'd like to replicate data for Zendesk\ \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." 
+ title: "Start date" + format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-01-01T00:00:00Z" order: 1 credentials: - title: "Authorization Method" type: "object" + title: "Authorization Method" oneOf: - type: "object" title: "OAuth2.0" @@ -68460,18 +79701,18 @@ components: order: 0 client_id: type: "string" - title: "Client ID" description: "The Client ID of your OAuth application." + title: "Client ID" airbyte_secret: true client_secret: type: "string" - title: "Client Secret" description: "The Client Secret of your OAuth application." + title: "Client Secret" airbyte_secret: true access_token: type: "string" - title: "Access Token" description: "Long-term access Token for making authenticated requests." + title: "Access Token" airbyte_secret: true - type: "object" title: "API Token" @@ -68489,15 +79730,16 @@ components: order: 1 api_token: type: "string" - title: "API Token" description: "API Token. See the docs for information on how to generate this key." + title: "API Token" airbyte_secret: true email: type: "string" - title: "Email" description: "The user email for your Zendesk account" + title: "Email" + order: 2 source-mention: type: "object" required: @@ -69341,6 +80583,55 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-track-pms: + type: "object" + required: + - "customer_domain" + - "api_key" + - "sourceType" + properties: + customer_domain: + type: "string" + order: 0 + title: "Customer Domain" + api_key: + type: "string" + order: 1 + title: "API Key" + api_secret: + type: "string" + order: 2 + title: "API Secret" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "track-pms" + const: "track-pms" + enum: + - "track-pms" + order: 0 + type: "string" + source-track-pms-update: + type: "object" + required: + - "customer_domain" + - "api_key" + properties: + customer_domain: + type: "string" + order: 0 + title: "Customer Domain" + api_key: + type: "string" + order: 1 + title: "API Key" + api_secret: + type: "string" + order: 2 + title: "API Secret" + always_show: true + airbyte_secret: true source-whisky-hunter: type: "object" required: @@ -69607,6 +80898,45 @@ components: "The date from which you'd like to replicate data for Salesloft\ \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ \ date will be replicated." 
+ source-youtube-data: + type: "object" + required: + - "api_key" + - "channel_ids" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + channel_ids: + type: "array" + order: 1 + title: "Channel IDs" + sourceType: + title: "youtube-data" + const: "youtube-data" + enum: + - "youtube-data" + order: 0 + type: "string" + source-youtube-data-update: + type: "object" + required: + - "api_key" + - "channel_ids" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + channel_ids: + type: "array" + order: 1 + title: "Channel IDs" source-yandex-metrica: title: "Yandex Metrica Spec" type: "object" @@ -70385,6 +81715,94 @@ components: - "2023-08-05T00:43:59.244Z" default: "2023-08-05T00:43:59.244Z" airbyte_secret: false + source-cal-com: + type: "object" + required: + - "orgId" + - "api_key" + - "sourceType" + properties: + orgId: + type: "string" + name: "Organization ID" + order: 0 + title: "orgId" + api_key: + type: "string" + description: "API key to use. Find it at https://cal.com/account" + name: "api_key" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "cal-com" + const: "cal-com" + enum: + - "cal-com" + order: 0 + type: "string" + source-cal-com-update: + type: "object" + required: + - "orgId" + - "api_key" + properties: + orgId: + type: "string" + name: "Organization ID" + order: 0 + title: "orgId" + api_key: + type: "string" + description: "API key to use. Find it at https://cal.com/account" + name: "api_key" + order: 1 + title: "API Key" + airbyte_secret: true + source-oveit: + type: "object" + required: + - "email" + - "password" + - "sourceType" + properties: + email: + type: "string" + description: "Oveit's login Email" + order: 0 + title: "Email" + password: + type: "string" + description: "Oveit's login Password" + order: 1 + title: "Password" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "oveit" + const: "oveit" + enum: + - "oveit" + order: 0 + type: "string" + source-oveit-update: + type: "object" + required: + - "email" + - "password" + properties: + email: + type: "string" + description: "Oveit's login Email" + order: 0 + title: "Email" + password: + type: "string" + description: "Oveit's login Password" + order: 1 + title: "Password" + airbyte_secret: true source-clockify: type: "object" required: @@ -72235,6 +83653,57 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-vercel: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token to authenticate with the Vercel API. Create and\ + \ manage tokens in your Vercel account settings." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "vercel" + const: "vercel" + enum: + - "vercel" + order: 0 + type: "string" + source-vercel-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Access token to authenticate with the Vercel API. Create and\ + \ manage tokens in your Vercel account settings." 
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" source-orbit: type: "object" required: @@ -72436,6 +83905,140 @@ components: title: "Domain name" description: "Your Confluence domain name" order: 2 + source-zoho-expense: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + data_center: + type: "string" + description: + "The domain suffix for the Zoho Expense API based on your data\ + \ center location (e.g., 'com', 'eu', 'in', etc.)" + enum: + - "com" + - "in" + - "jp" + - "ca" + - "com.cn" + - "sa" + - "com.au" + - "eu" + name: "domain" + order: 0 + title: "Data Center" + default: "com" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zoho-expense" + const: "zoho-expense" + enum: + - "zoho-expense" + order: 0 + type: "string" + source-zoho-expense-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + data_center: + type: "string" + description: + "The domain suffix for the Zoho Expense API based on your data\ + \ center location (e.g., 'com', 'eu', 'in', etc.)" + enum: + - "com" + - "in" + - "jp" + - "ca" + - "com.cn" + - "sa" + - "com.au" + - "eu" + name: "domain" + order: 0 + title: "Data Center" + default: "com" + client_id: + type: "string" + name: "client_id" + order: 1 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + source-formbricks: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate and find it in your Postman\ + \ account settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "formbricks" + const: "formbricks" + enum: + - "formbricks" + order: 0 + type: "string" + source-formbricks-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate and find it in your Postman\ + \ account settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-coin-api: title: "Coin API Spec" type: "object" @@ -72610,6 +84213,105 @@ components: \ to the name of the project . 
Example: 44056" order: 1 title: "Project Id" + source-zoho-bigin: + type: "object" + required: + - "client_id" + - "data_center" + - "client_secret" + - "client_refresh_token" + - "module_name" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + data_center: + type: "string" + description: "The data center where the Bigin account's resources are hosted" + enum: + - "com" + - "com.au" + - "eu" + - "in" + - "com.cn" + - "jp" + name: "data_center" + order: 1 + title: "Data Center" + default: "com" + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 3 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + module_name: + type: "string" + order: 4 + title: "Module Name" + sourceType: + title: "zoho-bigin" + const: "zoho-bigin" + enum: + - "zoho-bigin" + order: 0 + type: "string" + source-zoho-bigin-update: + type: "object" + required: + - "client_id" + - "data_center" + - "client_secret" + - "client_refresh_token" + - "module_name" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + data_center: + type: "string" + description: "The data center where the Bigin account's resources are hosted" + enum: + - "com" + - "com.au" + - "eu" + - "in" + - "com.cn" + - "jp" + name: "data_center" + order: 1 + title: "Data Center" + default: "com" + client_secret: + type: "string" + name: "client_secret" + order: 2 + title: "OAuth Client Secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 3 + title: "Refresh token" + airbyte_secret: true + module_name: + type: "string" + order: 4 + title: "Module Name" source-slack: title: "Slack Spec" type: "object" @@ -72855,6 +84557,61 @@ components: >docs for instructions on how to generate it." airbyte_secret: true order: 1 + source-tremendous: + type: "object" + required: + - "api_key" + - "environment" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate an API key through the Tremendous\ + \ dashboard under Team Settings > Developers. Save the key once you’ve\ + \ generated it." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + environment: + type: "string" + enum: + - "api" + - "testflight" + order: 1 + title: "Environment" + sourceType: + title: "tremendous" + const: "tremendous" + enum: + - "tremendous" + order: 0 + type: "string" + source-tremendous-update: + type: "object" + required: + - "api_key" + - "environment" + properties: + api_key: + type: "string" + description: + "API key to use. You can generate an API key through the Tremendous\ + \ dashboard under Team Settings > Developers. Save the key once you’ve\ + \ generated it." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + environment: + type: "string" + enum: + - "api" + - "testflight" + order: 1 + title: "Environment" source-gainsight-px: type: "object" required: @@ -72890,6 +84647,35 @@ components: "The Aptrinsic API Key which is recieved from the dashboard\ \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" order: 0 + source-humanitix: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "humanitix" + const: "humanitix" + enum: + - "humanitix" + order: 0 + type: "string" + source-humanitix-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 source-plaid: type: "object" required: @@ -74516,6 +86302,99 @@ components: order: 0 title: "API Key" airbyte_secret: true + source-zoho-invoice: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "region" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: "To be provided if a user belongs to multiple organizations" + order: 3 + title: "Organization ID" + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + order: 4 + title: "Region" + sourceType: + title: "zoho-invoice" + const: "zoho-invoice" + enum: + - "zoho-invoice" + order: 0 + type: "string" + source-zoho-invoice-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "region" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + organization_id: + type: "string" + description: "To be provided if a user belongs to multiple organizations" + order: 3 + title: "Organization ID" + region: + type: "string" + enum: + - "com" + - "eu" + - "in" + - "com.cn" + - "com.au" + - "jp" + - "sa" + - "ca" + order: 4 + title: "Region" source-breezy-hr: type: "object" required: @@ -75488,6 +87367,95 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-revolut-merchant: + type: "object" + required: + - "api_version" + - "secret_api_key" + - "start_date" + - "environment" + - "sourceType" + properties: + api_version: + type: "string" + description: + "Specify the API version to use. This is required for certain\ + \ API calls. Example: '2024-09-01'." + name: "api_version" + title: "API Version" + order: 0 + secret_api_key: + type: "string" + description: + "Secret API key to use for authenticating with the Revolut\ + \ Merchant API. Find it in your Revolut Business account under APIs >\ + \ Merchant API." 
+ name: "secret_api_key" + title: "Secret API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + environment: + type: "string" + description: "The base url of your environment. Either sandbox or production" + title: "environment" + enum: + - "sandbox-merchant" + - "merchant" + order: 3 + sourceType: + title: "revolut-merchant" + const: "revolut-merchant" + enum: + - "revolut-merchant" + order: 0 + type: "string" + source-revolut-merchant-update: + type: "object" + required: + - "api_version" + - "secret_api_key" + - "start_date" + - "environment" + properties: + api_version: + type: "string" + description: + "Specify the API version to use. This is required for certain\ + \ API calls. Example: '2024-09-01'." + name: "api_version" + title: "API Version" + order: 0 + secret_api_key: + type: "string" + description: + "Secret API key to use for authenticating with the Revolut\ + \ Merchant API. Find it in your Revolut Business account under APIs >\ + \ Merchant API." + name: "secret_api_key" + title: "Secret API Key" + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + environment: + type: "string" + description: "The base url of your environment. Either sandbox or production" + title: "environment" + enum: + - "sandbox-merchant" + - "merchant" + order: 3 source-hibob: type: "object" required: @@ -76139,6 +88107,23 @@ components: \ token." airbyte_secret: true x-speakeasy-param-sensitive: true + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Zendesk Support plan. More\ + \ info about the rate limit plan tiers can be found on Zendesk's API docs." + order: 3 sourceType: title: "zendesk-support" const: "zendesk-support" @@ -76241,6 +88226,23 @@ components: >full documentation for more information on generating this\ \ token." airbyte_secret: true + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 40 + default: 3 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker threads to use for the sync. The performance\ + \ upper boundary is based on the limit of your Zendesk Support plan. More\ + \ info about the rate limit plan tiers can be found on Zendesk's API docs." + order: 3 source-veeqo: type: "object" required: @@ -76703,6 +88705,45 @@ components: title: "Client Secret" airbyte_secret: true order: 4 + source-spotlercrm: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access Token to authenticate API requests. Generate it by\ + \ logging into your CRM system, navigating to Settings / Integrations\ + \ / API V4, and clicking 'generate new key'." 
+ name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "spotlercrm" + const: "spotlercrm" + enum: + - "spotlercrm" + order: 0 + type: "string" + source-spotlercrm-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + description: + "Access Token to authenticate API requests. Generate it by\ + \ logging into your CRM system, navigating to Settings / Integrations\ + \ / API V4, and clicking 'generate new key'." + name: "access_token" + order: 0 + title: "Access Token" + airbyte_secret: true source-ashby: type: "object" required: @@ -76795,6 +88836,48 @@ components: \ Access and select API integration." airbyte_secret: true order: 0 + source-freightview: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "freightview" + const: "freightview" + enum: + - "freightview" + order: 0 + type: "string" + source-freightview-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client Secret" + airbyte_secret: true source-onesignal: type: "object" required: @@ -81492,6 +93575,43 @@ components: pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" order: 1 format: "date-time" + source-pretix: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Obtain it from the pretix web interface\ + \ by creating a new token under your team settings." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "pretix" + const: "pretix" + enum: + - "pretix" + order: 0 + type: "string" + source-pretix-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Obtain it from the pretix web interface\ + \ by creating a new token under your team settings." 
+ name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true source-cimis: type: "object" required: @@ -82058,6 +94178,61 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-blogger: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "blogger" + const: "blogger" + enum: + - "blogger" + order: 0 + type: "string" + source-blogger-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "client_refresh_token" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + client_refresh_token: + type: "string" + order: 2 + title: "Refresh token" + airbyte_secret: true source-codefresh: type: "object" required: @@ -83446,6 +95621,92 @@ components: >docs for more information on where to find that key." airbyte_secret: true order: 0 + source-web-scrapper: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://cloud.webscraper.io/api" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "web-scrapper" + const: "web-scrapper" + enum: + - "web-scrapper" + order: 0 + type: "string" + source-web-scrapper-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: "API token to use. Find it at https://cloud.webscraper.io/api" + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-wufoo: + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your Wufoo API Key. You can find it by logging into your Wufoo\ + \ account, selecting 'API Information' from the 'More' dropdown on any\ + \ form, and locating the 16-digit code." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: "Your account subdomain/username for Wufoo." + name: "subdomain" + order: 1 + title: "Subdomain" + sourceType: + title: "wufoo" + const: "wufoo" + enum: + - "wufoo" + order: 0 + type: "string" + source-wufoo-update: + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + description: + "Your Wufoo API Key. You can find it by logging into your Wufoo\ + \ account, selecting 'API Information' from the 'More' dropdown on any\ + \ form, and locating the 16-digit code." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: "Your account subdomain/username for Wufoo." + name: "subdomain" + order: 1 + title: "Subdomain" source-configcat: type: "object" required: @@ -83562,6 +95823,181 @@ components: description: "Your Insightly API token." 
airbyte_secret: true order: 1 + source-zoho-desk: + type: "object" + required: + - "client_id" + - "client_secret" + - "token_refresh_endpoint" + - "refresh_token" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_refresh_endpoint: + type: "string" + name: "token_refresh_endpoint" + order: 2 + title: "Token Refresh Endpoint" + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + include_custom_domain: + type: "boolean" + order: 4 + title: "Include Custom Domain" + sourceType: + title: "zoho-desk" + const: "zoho-desk" + enum: + - "zoho-desk" + order: 0 + type: "string" + source-zoho-desk-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "token_refresh_endpoint" + - "refresh_token" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + token_refresh_endpoint: + type: "string" + name: "token_refresh_endpoint" + order: 2 + title: "Token Refresh Endpoint" + refresh_token: + type: "string" + name: "refresh_token" + order: 3 + title: "OAuth Refresh Token" + airbyte_secret: true + include_custom_domain: + type: "boolean" + order: 4 + title: "Include Custom Domain" + source-pipeliner: + type: "object" + required: + - "username" + - "service" + - "spaceid" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + service: + type: "string" + enum: + - "eu-central" + - "us-east" + - "ca-central" + - "ap-southeast" + order: 2 + title: "Data Center" + spaceid: + type: "string" + order: 3 + title: "Space ID" + sourceType: + title: "pipeliner" + const: "pipeliner" + enum: + - "pipeliner" + order: 0 + type: "string" + source-pipeliner-update: + type: "object" + required: + - "username" + - "service" + - "spaceid" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + service: + type: "string" + enum: + - "eu-central" + - "us-east" + - "ca-central" + - "ap-southeast" + order: 2 + title: "Data Center" + spaceid: + type: "string" + order: 3 + title: "Space ID" + source-opinion-stage: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "opinion-stage" + const: "opinion-stage" + enum: + - "opinion-stage" + order: 0 + type: "string" + source-opinion-stage-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 source-cart: title: "Cart.com Spec" type: "object" @@ -84793,6 +97229,37 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-salesflare: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Enter your API key like this: Bearer YOUR_API_KEY"
+ order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "salesflare" + const: "salesflare" + enum: + - "salesflare" + order: 0 + type: "string" + source-salesflare-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "Enter your API key like this: Bearer YOUR_API_KEY" + order: 0 + title: "API Key" + airbyte_secret: true source-brevo: type: "object" required: @@ -84836,6 +97303,171 @@ components: title: "Start date" format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-finage: + type: "object" + required: + - "api_key" + - "symbols" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + symbols: + type: "array" + description: "List of symbols " + order: 1 + title: "Symbols" + tech_indicator_type: + type: "string" + description: "One of DEMA, EMA, SMA, WMA, RSI, TEMA, Williams, ADX " + enum: + - "DEMA" + - "EMA" + - "SMA" + - "WMA" + - "RSI" + - "TEMA" + - "Williams" + - "ADX" + order: 2 + title: "Technical Indicator Type" + default: "SMA" + time: + type: "string" + enum: + - "daily" + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + order: 3 + title: "Time Interval" + default: "daily" + period: + type: "string" + description: "Time period. Default is 10" + order: 4 + title: "Period" + time_aggregates: + type: "string" + description: "Size of the time" + enum: + - "minute" + - "hour" + - "day" + - "week" + - "month" + - "quarter" + - "year" + order: 5 + title: "Time aggregates" + default: "day" + time_period: + type: "string" + description: "Time Period for cash flow stmts" + enum: + - "annual" + - "quarter" + order: 6 + title: "Time Period" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 7 + sourceType: + title: "finage" + const: "finage" + enum: + - "finage" + order: 0 + type: "string" + source-finage-update: + type: "object" + required: + - "api_key" + - "symbols" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + symbols: + type: "array" + description: "List of symbols " + order: 1 + title: "Symbols" + tech_indicator_type: + type: "string" + description: "One of DEMA, EMA, SMA, WMA, RSI, TEMA, Williams, ADX " + enum: + - "DEMA" + - "EMA" + - "SMA" + - "WMA" + - "RSI" + - "TEMA" + - "Williams" + - "ADX" + order: 2 + title: "Technical Indicator Type" + default: "SMA" + time: + type: "string" + enum: + - "daily" + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + order: 3 + title: "Time Interval" + default: "daily" + period: + type: "string" + description: "Time period. 
Default is 10" + order: 4 + title: "Period" + time_aggregates: + type: "string" + description: "Size of the time" + enum: + - "minute" + - "hour" + - "day" + - "week" + - "month" + - "quarter" + - "year" + order: 5 + title: "Time aggregates" + default: "day" + time_period: + type: "string" + description: "Time Period for cash flow stmts" + enum: + - "annual" + - "quarter" + order: 6 + title: "Time Period" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 7 source-datascope: type: "object" required: @@ -85471,6 +98103,45 @@ components: - "1day" - "1week" - "1month" + source-smartreach: + type: "object" + required: + - "api_key" + - "teamid" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + teamid: + type: "number" + title: "TeamID" + order: 1 + sourceType: + title: "smartreach" + const: "smartreach" + enum: + - "smartreach" + order: 0 + type: "string" + source-smartreach-update: + type: "object" + required: + - "api_key" + - "teamid" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + teamid: + type: "number" + title: "TeamID" + order: 1 source-monday: title: "Monday Spec" type: "object" @@ -85609,6 +98280,170 @@ components: title: "Personal API Token" description: "API Token for making authenticated requests." airbyte_secret: true + source-onepagecrm: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + description: "Enter the user ID of your API app" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API Key of your API app" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "onepagecrm" + const: "onepagecrm" + enum: + - "onepagecrm" + order: 0 + type: "string" + source-onepagecrm-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + description: "Enter the user ID of your API app" + order: 0 + title: "Username" + password: + type: "string" + description: "Enter your API Key of your API app" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + source-financial-modelling: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + exchange: + type: "string" + description: + "The stock exchange : AMEX, AMS, AQS, ASX, ATH, BER, BME, BRU,\ + \ BSE, BUD, BUE, BVC, CAI, CBOE, CNQ, CPH, DFM, DOH, DUS, DXE, EGX, EURONEXT,\ + \ HAM, HEL, HKSE, ICE, IOB, IST, JKT, JNB, JPX, KLS, KOE, KSC, KUW, LSE,\ + \ MCX, MEX, MIL, MUN, NASDAQ, NEO, NSE, NYSE, NZE, OEM, OQX, OSL, OTC,\ + \ PNK, PRA, RIS, SAO, SAU, SES, SET, SGO, SHH, SHZ, SIX, STO, STU, TAI,\ + \ TLV, TSX, TSXV, TWO, VIE, VSE, WSE, XETRA" + order: 1 + title: "Exchange" + default: "NASDAQ" + marketcapmorethan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap more\ + \ than the given market cap" + order: 2 + title: "Market Cap More Than" + marketcaplowerthan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap lower\ + \ than the given market cap" + order: 3 + title: "Market Cap Lower Than" + time_frame: + type: "string" + description: "For example 
1min, 5min, 15min, 30min, 1hour, 4hour" + order: 4 + title: "Time Frame" + default: "4hour" + enum: + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + sourceType: + title: "financial-modelling" + const: "financial-modelling" + enum: + - "financial-modelling" + order: 0 + type: "string" + source-financial-modelling-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + exchange: + type: "string" + description: + "The stock exchange : AMEX, AMS, AQS, ASX, ATH, BER, BME, BRU,\ + \ BSE, BUD, BUE, BVC, CAI, CBOE, CNQ, CPH, DFM, DOH, DUS, DXE, EGX, EURONEXT,\ + \ HAM, HEL, HKSE, ICE, IOB, IST, JKT, JNB, JPX, KLS, KOE, KSC, KUW, LSE,\ + \ MCX, MEX, MIL, MUN, NASDAQ, NEO, NSE, NYSE, NZE, OEM, OQX, OSL, OTC,\ + \ PNK, PRA, RIS, SAO, SAU, SES, SET, SGO, SHH, SHZ, SIX, STO, STU, TAI,\ + \ TLV, TSX, TSXV, TWO, VIE, VSE, WSE, XETRA" + order: 1 + title: "Exchange" + default: "NASDAQ" + marketcapmorethan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap more\ + \ than the given market cap" + order: 2 + title: "Market Cap More Than" + marketcaplowerthan: + type: "string" + description: + "Used in screener to filter out stocks with a market cap lower\ + \ than the given market cap" + order: 3 + title: "Market Cap Lower Than" + time_frame: + type: "string" + description: "For example 1min, 5min, 15min, 30min, 1hour, 4hour" + order: 4 + title: "Time Frame" + default: "4hour" + enum: + - "1min" + - "5min" + - "15min" + - "30min" + - "1hour" + - "4hour" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 source-waiteraid: type: "object" required: @@ -86013,6 +98848,81 @@ components: description: "Identification token for app accessing data" airbyte_secret: true order: 1 + source-invoiceninja: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "invoiceninja" + const: "invoiceninja" + enum: + - "invoiceninja" + order: 0 + type: "string" + source-invoiceninja-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + source-sendpulse: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "sendpulse" + const: "sendpulse" + enum: + - "sendpulse" + order: 0 + type: "string" + source-sendpulse-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + name: "client_id" + order: 0 + title: "OAuth Client ID" + airbyte_secret: true + client_secret: + type: "string" + name: "client_secret" + order: 1 + title: "OAuth Client Secret" + airbyte_secret: true source-bigquery: title: "BigQuery Source Spec" type: "object" @@ -86289,6 
+99199,39 @@ components: >here." airbyte_secret: true order: 0 + source-tickettailor: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://www.getdrip.com/user/edit" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "tickettailor" + const: "tickettailor" + enum: + - "tickettailor" + order: 0 + type: "string" + source-tickettailor-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://www.getdrip.com/user/edit" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-calendly: type: "object" required: @@ -86425,6 +99368,72 @@ components: type: "string" title: "Engine" description: "Engine name to connect to." + source-eventee: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it at https://admin.eventee.co/\ + \ in 'Settings -> Features'." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "eventee" + const: "eventee" + enum: + - "eventee" + order: 0 + type: "string" + source-eventee-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "API token to use. Generate it at https://admin.eventee.co/\ + \ in 'Settings -> Features'." + name: "api_token" + order: 0 + title: "API Token" + airbyte_secret: true + source-simfin: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "simfin" + const: "simfin" + enum: + - "simfin" + order: 0 + type: "string" + source-simfin-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true source-pivotal-tracker: title: "Pivotal Tracker Spec" type: "object" @@ -86507,6 +99516,127 @@ components: description: "Date from when the sync should start in epoch Unix timestamp" order: 2 title: "Start Date" + source-brex: + type: "object" + required: + - "user_token" + - "start_date" + - "sourceType" + properties: + user_token: + type: "string" + description: + "User token to authenticate API requests. Generate it from\ + \ your Brex dashboard under Developer > Settings." + name: "user_token" + title: "User Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "brex" + const: "brex" + enum: + - "brex" + order: 0 + type: "string" + source-brex-update: + type: "object" + required: + - "user_token" + - "start_date" + properties: + user_token: + type: "string" + description: + "User token to authenticate API requests. Generate it from\ + \ your Brex dashboard under Developer > Settings." 
+ name: "user_token" + title: "User Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-fulcrum: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://web.fulcrumapp.com/settings/api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "fulcrum" + const: "fulcrum" + enum: + - "fulcrum" + order: 0 + type: "string" + source-fulcrum-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://web.fulcrumapp.com/settings/api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-bigmailer: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use. You can create and find it on the API key\ + \ management page in your BigMailer account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "bigmailer" + const: "bigmailer" + enum: + - "bigmailer" + order: 0 + type: "string" + source-bigmailer-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "API key to use. You can create and find it on the API key\ + \ management page in your BigMailer account." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true source-senseforce: type: "object" required: @@ -88224,6 +101354,21 @@ components: description: "The Client Secret of your Drift developer application." airbyte_secret: true title: null + snapchat-marketing: + properties: + client_id: + type: "string" + description: "The Client ID of your Snapchat developer application." + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret of your Snapchat developer application." + order: 1 + title: "Client Secret" + airbyte_secret: true + title: null gitlab: properties: credentials: @@ -88415,9 +101560,9 @@ components: "The client ID of your Amazon Ads developer application. See\ \ the docs for more information." + airbyte_secret: true order: 1 type: "string" - airbyte_secret: true client_secret: title: "Client Secret" description: @@ -88427,7 +101572,7 @@ components: airbyte_secret: true order: 2 type: "string" - title: "Amazon Ads Spec" + title: "Source Amazon Ads" github: properties: credentials: @@ -88577,20 +101722,6 @@ components: >here." airbyte_secret: true title: "Google Search Console Spec" - retently: - properties: - credentials: - properties: - client_id: - title: "Client ID" - type: "string" - description: "The Client ID of your Retently developer application." - client_secret: - title: "Client Secret" - type: "string" - description: "The Client Secret of your Retently developer application." - airbyte_secret: true - title: "Retently Api Spec" instagram: properties: client_id: @@ -88606,21 +101737,6 @@ components: airbyte_hidden: true type: "string" title: "Source Instagram" - zendesk-sunshine: - properties: - credentials: - properties: - client_id: - type: "string" - title: "Client ID" - description: "The Client ID of your OAuth application." 
- airbyte_secret: true - client_secret: - type: "string" - title: "Client Secret" - description: "The Client Secret of your OAuth application." - airbyte_secret: true - title: null snowflake: properties: credentials: @@ -102339,29 +115455,34 @@ components: - "destinationType" properties: motherduck_api_key: - title: "MotherDuck API Key" + title: "MotherDuck Access Token" type: "string" - description: "API key to use for authentication to a MotherDuck database." + description: + "API access token to use for authentication to a MotherDuck\ + \ database." airbyte_secret: true x-speakeasy-param-sensitive: true destination_path: title: "Destination DB" type: "string" description: - "Path to the .duckdb file, or the text 'md:' to connect to\ - \ MotherDuck. The file will be placed inside that local mount. For more\ - \ information check out our docs" + "Path to a .duckdb file or 'md:' to connect\ + \ to a MotherDuck database. If 'md:' is specified without a database name,\ + \ the default MotherDuck database name ('my_db') will be used." examples: - "/local/destination.duckdb" - "md:" - - "motherduck:" + - "md:data_db" + - "md:my_db" default: "md:" schema: - title: "Destination Schema" + title: "Schema Name" type: "string" - description: "Database schema name, default for duckdb is 'main'." - example: "main" + description: "Database schema name, defaults to 'main' if not specified." + examples: + - "main" + - "airbyte_raw" + - "my_schema" destinationType: title: "motherduck" const: "motherduck" @@ -102376,28 +115497,33 @@ components: - "motherduck_api_key" properties: motherduck_api_key: - title: "MotherDuck API Key" + title: "MotherDuck Access Token" type: "string" - description: "API key to use for authentication to a MotherDuck database." + description: + "API access token to use for authentication to a MotherDuck\ + \ database." airbyte_secret: true destination_path: title: "Destination DB" type: "string" description: - "Path to the .duckdb file, or the text 'md:' to connect to\ - \ MotherDuck. The file will be placed inside that local mount. For more\ - \ information check out our docs" + "Path to a .duckdb file or 'md:' to connect\ + \ to a MotherDuck database. If 'md:' is specified without a database name,\ + \ the default MotherDuck database name ('my_db') will be used." examples: - "/local/destination.duckdb" - "md:" - - "motherduck:" + - "md:data_db" + - "md:my_db" default: "md:" schema: - title: "Destination Schema" + title: "Schema Name" type: "string" - description: "Database schema name, default for duckdb is 'main'." - example: "main" + description: "Database schema name, defaults to 'main' if not specified." + examples: + - "main" + - "airbyte_raw" + - "my_schema" destination-s3: title: "S3 Destination Spec" type: "object" @@ -107325,6 +120451,32 @@ components: type: "string" x-speakeasy-entity: Source_Activecampaign x-speakeasy-param-suppress-computed-diff: true + SourceAgilecrmCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-agilecrm" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Agilecrm + x-speakeasy-param-suppress-computed-diff: true SourceAhaCreateRequest: required: - name @@ -107429,6 +120581,32 @@ components: type: "string" x-speakeasy-entity: Source_Airtable x-speakeasy-param-suppress-computed-diff: true + SourceAkeneoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-akeneo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Akeneo + x-speakeasy-param-suppress-computed-diff: true SourceAlgoliaCreateRequest: required: - name @@ -107689,6 +120867,32 @@ components: type: "string" x-speakeasy-entity: Source_AppleSearchAds x-speakeasy-param-suppress-computed-diff: true + SourceApptivoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-apptivo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Apptivo + x-speakeasy-param-suppress-computed-diff: true SourceAsanaCreateRequest: required: - name @@ -107923,6 +121127,32 @@ components: type: "string" x-speakeasy-entity: Source_Beamer x-speakeasy-param-suppress-computed-diff: true + SourceBigmailerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-bigmailer" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Bigmailer + x-speakeasy-param-suppress-computed-diff: true SourceBigqueryCreateRequest: required: - name @@ -108001,6 +121231,58 @@ components: type: "string" x-speakeasy-entity: Source_Bitly x-speakeasy-param-suppress-computed-diff: true + SourceBloggerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-blogger" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Blogger + x-speakeasy-param-suppress-computed-diff: true + SourceBoxCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-box" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Box + x-speakeasy-param-suppress-computed-diff: true SourceBraintreeCreateRequest: required: - name @@ -108105,6 +121387,32 @@ components: type: "string" x-speakeasy-entity: Source_Brevo x-speakeasy-param-suppress-computed-diff: true + SourceBrexCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-brex" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Brex + x-speakeasy-param-suppress-computed-diff: true SourceBugsnagCreateRequest: required: - name @@ -108157,6 +121465,32 @@ components: type: "string" x-speakeasy-entity: Source_Buildkite x-speakeasy-param-suppress-computed-diff: true + SourceBunnyIncCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-bunny-inc" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_BunnyInc + x-speakeasy-param-suppress-computed-diff: true SourceBuzzsproutCreateRequest: required: - name @@ -108183,6 +121517,32 @@ components: type: "string" x-speakeasy-entity: Source_Buzzsprout x-speakeasy-param-suppress-computed-diff: true + SourceCalComCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-cal-com" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_CalCom + x-speakeasy-param-suppress-computed-diff: true SourceCalendlyCreateRequest: required: - name @@ -108261,6 +121621,32 @@ components: type: "string" x-speakeasy-entity: Source_CampaignMonitor x-speakeasy-param-suppress-computed-diff: true + SourceCampaynCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-campayn" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Campayn + x-speakeasy-param-suppress-computed-diff: true SourceCannyCreateRequest: required: - name @@ -108287,6 +121673,32 @@ components: type: "string" x-speakeasy-entity: Source_Canny x-speakeasy-param-suppress-computed-diff: true + SourceCapsuleCrmCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-capsule-crm" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_CapsuleCrm + x-speakeasy-param-suppress-computed-diff: true SourceCareQualityCommissionCreateRequest: required: - name @@ -108521,6 +121933,32 @@ components: type: "string" x-speakeasy-entity: Source_Cimis x-speakeasy-param-suppress-computed-diff: true + SourceCin7CreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-cin7" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Cin7 + x-speakeasy-param-suppress-computed-diff: true SourceCircaCreateRequest: required: - name @@ -108703,6 +122141,32 @@ components: type: "string" x-speakeasy-entity: Source_Clockify x-speakeasy-param-suppress-computed-diff: true + SourceClockodoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-clockodo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Clockodo + x-speakeasy-param-suppress-computed-diff: true SourceCloseComCreateRequest: required: - name @@ -108729,6 +122193,32 @@ components: type: "string" x-speakeasy-entity: Source_CloseCom x-speakeasy-param-suppress-computed-diff: true + SourceCloudbedsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-cloudbeds" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Cloudbeds + x-speakeasy-param-suppress-computed-diff: true SourceCoassembleCreateRequest: required: - name @@ -109171,6 +122661,32 @@ components: type: "string" x-speakeasy-entity: Source_Delighted x-speakeasy-param-suppress-computed-diff: true + SourceDeputyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-deputy" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Deputy + x-speakeasy-param-suppress-computed-diff: true SourceDixaCreateRequest: required: - name @@ -109353,6 +122869,32 @@ components: type: "string" x-speakeasy-entity: Source_Dynamodb x-speakeasy-param-suppress-computed-diff: true + SourceEConomicCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-e-conomic" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_EConomic + x-speakeasy-param-suppress-computed-diff: true SourceEasypostCreateRequest: required: - name @@ -109405,6 +122947,32 @@ components: type: "string" x-speakeasy-entity: Source_Easypromos x-speakeasy-param-suppress-computed-diff: true + SourceElasticemailCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-elasticemail" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Elasticemail + x-speakeasy-param-suppress-computed-diff: true SourceEmailoctopusCreateRequest: required: - name @@ -109457,6 +123025,32 @@ components: type: "string" x-speakeasy-entity: Source_EmploymentHero x-speakeasy-param-suppress-computed-diff: true + SourceEnchargeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-encharge" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Encharge + x-speakeasy-param-suppress-computed-diff: true SourceEventbriteCreateRequest: required: - name @@ -109483,6 +123077,58 @@ components: type: "string" x-speakeasy-entity: Source_Eventbrite x-speakeasy-param-suppress-computed-diff: true + SourceEventeeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-eventee" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Eventee + x-speakeasy-param-suppress-computed-diff: true + SourceEventzillaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-eventzilla" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Eventzilla + x-speakeasy-param-suppress-computed-diff: true SourceExchangeRatesCreateRequest: required: - name @@ -109665,6 +123311,136 @@ components: type: "string" x-speakeasy-entity: Source_File x-speakeasy-param-suppress-computed-diff: true + SourceFilloutCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-fillout" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Fillout + x-speakeasy-param-suppress-computed-diff: true + SourceFinageCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-finage" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Finage + x-speakeasy-param-suppress-computed-diff: true + SourceFinancialModellingCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-financial-modelling" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_FinancialModelling + x-speakeasy-param-suppress-computed-diff: true + SourceFinnhubCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-finnhub" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Finnhub + x-speakeasy-param-suppress-computed-diff: true + SourceFinnworldsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-finnworlds" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Finnworlds + x-speakeasy-param-suppress-computed-diff: true SourceFireboltCreateRequest: required: - name @@ -109691,6 +123467,32 @@ components: type: "string" x-speakeasy-entity: Source_Firebolt x-speakeasy-param-suppress-computed-diff: true + SourceFirehydrantCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-firehydrant" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Firehydrant + x-speakeasy-param-suppress-computed-diff: true SourceFleetioCreateRequest: required: - name @@ -109717,6 +123519,32 @@ components: type: "string" x-speakeasy-entity: Source_Fleetio x-speakeasy-param-suppress-computed-diff: true + SourceFlexmailCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-flexmail" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Flexmail + x-speakeasy-param-suppress-computed-diff: true SourceFlexportCreateRequest: required: - name @@ -109769,6 +123597,58 @@ components: type: "string" x-speakeasy-entity: Source_Float x-speakeasy-param-suppress-computed-diff: true + SourceFlowluCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-flowlu" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Flowlu + x-speakeasy-param-suppress-computed-diff: true + SourceFormbricksCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-formbricks" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Formbricks + x-speakeasy-param-suppress-computed-diff: true SourceFreeAgentConnectorCreateRequest: required: - name @@ -109795,6 +123675,58 @@ components: type: "string" x-speakeasy-entity: Source_FreeAgentConnector x-speakeasy-param-suppress-computed-diff: true + SourceFreightviewCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-freightview" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Freightview + x-speakeasy-param-suppress-computed-diff: true + SourceFreshbooksCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-freshbooks" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Freshbooks + x-speakeasy-param-suppress-computed-diff: true SourceFreshcallerCreateRequest: required: - name @@ -109951,6 +123883,32 @@ components: type: "string" x-speakeasy-entity: Source_Front x-speakeasy-param-suppress-computed-diff: true + SourceFulcrumCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-fulcrum" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Fulcrum + x-speakeasy-param-suppress-computed-diff: true SourceGainsightPxCreateRequest: required: - name @@ -110003,6 +123961,32 @@ components: type: "string" x-speakeasy-entity: Source_Gcs x-speakeasy-param-suppress-computed-diff: true + SourceGetgistCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-getgist" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Getgist + x-speakeasy-param-suppress-computed-diff: true SourceGetlagoCreateRequest: required: - name @@ -110029,6 +124013,32 @@ components: type: "string" x-speakeasy-entity: Source_Getlago x-speakeasy-param-suppress-computed-diff: true + SourceGitbookCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-gitbook" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Gitbook + x-speakeasy-param-suppress-computed-diff: true SourceGithubCreateRequest: required: - name @@ -110315,6 +124325,32 @@ components: type: "string" x-speakeasy-entity: Source_GoogleCalendar x-speakeasy-param-suppress-computed-diff: true + SourceGoogleClassroomCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-classroom" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleClassroom + x-speakeasy-param-suppress-computed-diff: true SourceGoogleDirectoryCreateRequest: required: - name @@ -110367,6 +124403,32 @@ components: type: "string" x-speakeasy-entity: Source_GoogleDrive x-speakeasy-param-suppress-computed-diff: true + SourceGoogleFormsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-forms" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleForms + x-speakeasy-param-suppress-computed-diff: true SourceGooglePagespeedInsightsCreateRequest: required: - name @@ -110809,6 +124871,32 @@ components: type: "string" x-speakeasy-entity: Source_Hubspot x-speakeasy-param-suppress-computed-diff: true + SourceHumanitixCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-humanitix" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Humanitix + x-speakeasy-param-suppress-computed-diff: true SourceIlluminaBasespaceCreateRequest: required: - name @@ -110861,6 +124949,32 @@ components: type: "string" x-speakeasy-entity: Source_IncidentIo x-speakeasy-param-suppress-computed-diff: true + SourceInflowinventoryCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-inflowinventory" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Inflowinventory + x-speakeasy-param-suppress-computed-diff: true SourceInsightlyCreateRequest: required: - name @@ -110991,6 +125105,32 @@ components: type: "string" x-speakeasy-entity: Source_Invoiced x-speakeasy-param-suppress-computed-diff: true + SourceInvoiceninjaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-invoiceninja" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Invoiceninja + x-speakeasy-param-suppress-computed-diff: true SourceIp2whoisCreateRequest: required: - name @@ -111069,6 +125209,32 @@ components: type: "string" x-speakeasy-entity: Source_Jira x-speakeasy-param-suppress-computed-diff: true + SourceJobnimbusCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-jobnimbus" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Jobnimbus + x-speakeasy-param-suppress-computed-diff: true SourceJotformCreateRequest: required: - name @@ -111433,6 +125599,32 @@ components: type: "string" x-speakeasy-entity: Source_Lemlist x-speakeasy-param-suppress-computed-diff: true + SourceLessAnnoyingCrmCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-less-annoying-crm" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_LessAnnoyingCrm + x-speakeasy-param-suppress-computed-diff: true SourceLeverHiringCreateRequest: required: - name @@ -111459,6 +125651,32 @@ components: type: "string" x-speakeasy-entity: Source_LeverHiring x-speakeasy-param-suppress-computed-diff: true + SourceLightspeedRetailCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-lightspeed-retail" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_LightspeedRetail + x-speakeasy-param-suppress-computed-diff: true SourceLinkedinAdsCreateRequest: required: - name @@ -111771,6 +125989,58 @@ components: type: "string" x-speakeasy-entity: Source_MailjetSms x-speakeasy-param-suppress-computed-diff: true + SourceMailosaurCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mailosaur" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Mailosaur + x-speakeasy-param-suppress-computed-diff: true + SourceMailtrapCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mailtrap" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Mailtrap + x-speakeasy-param-suppress-computed-diff: true SourceMarketoCreateRequest: required: - name @@ -111797,6 +126067,32 @@ components: type: "string" x-speakeasy-entity: Source_Marketo x-speakeasy-param-suppress-computed-diff: true + SourceMarketstackCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-marketstack" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Marketstack + x-speakeasy-param-suppress-computed-diff: true SourceMentionCreateRequest: required: - name @@ -112395,6 +126691,58 @@ components: type: "string" x-speakeasy-entity: Source_NewsApi x-speakeasy-param-suppress-computed-diff: true + SourceNewsdataIoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-newsdata-io" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_NewsdataIo + x-speakeasy-param-suppress-computed-diff: true + SourceNocrmCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-nocrm" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Nocrm + x-speakeasy-param-suppress-computed-diff: true SourceNorthpassLmsCreateRequest: required: - name @@ -112447,6 +126795,32 @@ components: type: "string" x-speakeasy-entity: Source_Notion x-speakeasy-param-suppress-computed-diff: true + SourceNutshellCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-nutshell" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Nutshell + x-speakeasy-param-suppress-computed-diff: true SourceNylasCreateRequest: required: - name @@ -112551,6 +126925,58 @@ components: type: "string" x-speakeasy-entity: Source_Omnisend x-speakeasy-param-suppress-computed-diff: true + SourceOncehubCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-oncehub" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Oncehub + x-speakeasy-param-suppress-computed-diff: true + SourceOnepagecrmCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-onepagecrm" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Onepagecrm + x-speakeasy-param-suppress-computed-diff: true SourceOnesignalCreateRequest: required: - name @@ -112577,6 +127003,32 @@ components: type: "string" x-speakeasy-entity: Source_Onesignal x-speakeasy-param-suppress-computed-diff: true + SourceOnfleetCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. 
dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-onfleet" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Onfleet + x-speakeasy-param-suppress-computed-diff: true SourceOpenDataDcCreateRequest: required: - name @@ -112603,6 +127055,58 @@ components: type: "string" x-speakeasy-entity: Source_OpenDataDc x-speakeasy-param-suppress-computed-diff: true + SourceOpenaqCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-openaq" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Openaq + x-speakeasy-param-suppress-computed-diff: true + SourceOpenfdaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-openfda" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Openfda + x-speakeasy-param-suppress-computed-diff: true SourceOpenweatherCreateRequest: required: - name @@ -112629,6 +127133,32 @@ components: type: "string" x-speakeasy-entity: Source_Openweather x-speakeasy-param-suppress-computed-diff: true + SourceOpinionStageCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-opinion-stage" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_OpinionStage + x-speakeasy-param-suppress-computed-diff: true SourceOpsgenieCreateRequest: required: - name @@ -112811,6 +127341,58 @@ components: type: "string" x-speakeasy-entity: Source_Outreach x-speakeasy-param-suppress-computed-diff: true + SourceOveitCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-oveit" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Oveit + x-speakeasy-param-suppress-computed-diff: true + SourcePabblySubscriptionsBillingCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pabbly-subscriptions-billing" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_PabblySubscriptionsBilling + x-speakeasy-param-suppress-computed-diff: true SourcePandadocCreateRequest: required: - name @@ -112837,6 +127419,58 @@ components: type: "string" x-speakeasy-entity: Source_Pandadoc x-speakeasy-param-suppress-computed-diff: true + SourcePaperformCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-paperform" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Paperform + x-speakeasy-param-suppress-computed-diff: true + SourcePapersignCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-papersign" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Papersign + x-speakeasy-param-suppress-computed-diff: true SourcePardotCreateRequest: required: - name @@ -113123,6 +127757,32 @@ components: type: "string" x-speakeasy-entity: Source_Pipedrive x-speakeasy-param-suppress-computed-diff: true + SourcePipelinerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pipeliner" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Pipeliner + x-speakeasy-param-suppress-computed-diff: true SourcePivotalTrackerCreateRequest: required: - name @@ -113435,6 +128095,32 @@ components: type: "string" x-speakeasy-entity: Source_Prestashop x-speakeasy-param-suppress-computed-diff: true + SourcePretixCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pretix" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Pretix + x-speakeasy-param-suppress-computed-diff: true SourcePrimetricCreateRequest: required: - name @@ -113851,6 +128537,32 @@ components: type: "string" x-speakeasy-entity: Source_Rentcast x-speakeasy-param-suppress-computed-diff: true + SourceRepairshoprCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-repairshopr" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Repairshopr + x-speakeasy-param-suppress-computed-diff: true SourceReplyIoCreateRequest: required: - name @@ -113929,6 +128641,32 @@ components: type: "string" x-speakeasy-entity: Source_Revenuecat x-speakeasy-param-suppress-computed-diff: true + SourceRevolutMerchantCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-revolut-merchant" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_RevolutMerchant + x-speakeasy-param-suppress-computed-diff: true SourceRkiCovidCreateRequest: required: - name @@ -113955,6 +128693,32 @@ components: type: "string" x-speakeasy-entity: Source_RkiCovid x-speakeasy-param-suppress-computed-diff: true + SourceRocketlaneCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-rocketlane" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Rocketlane + x-speakeasy-param-suppress-computed-diff: true SourceRollbarCreateRequest: required: - name @@ -114033,6 +128797,32 @@ components: type: "string" x-speakeasy-entity: Source_Rss x-speakeasy-param-suppress-computed-diff: true + SourceRuddrCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-ruddr" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Ruddr + x-speakeasy-param-suppress-computed-diff: true SourceS3CreateRequest: required: - name @@ -114111,6 +128901,32 @@ components: type: "string" x-speakeasy-entity: Source_SageHr x-speakeasy-param-suppress-computed-diff: true + SourceSalesflareCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-salesflare" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Salesflare + x-speakeasy-param-suppress-computed-diff: true SourceSalesforceCreateRequest: required: - name @@ -114345,6 +129161,58 @@ components: type: "string" x-speakeasy-entity: Source_Sendinblue x-speakeasy-param-suppress-computed-diff: true + SourceSendowlCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sendowl" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Sendowl + x-speakeasy-param-suppress-computed-diff: true + SourceSendpulseCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sendpulse" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Sendpulse + x-speakeasy-param-suppress-computed-diff: true SourceSenseforceCreateRequest: required: - name @@ -114475,6 +129343,32 @@ components: type: "string" x-speakeasy-entity: Source_Sharetribe x-speakeasy-param-suppress-computed-diff: true + SourceShippoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-shippo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Shippo + x-speakeasy-param-suppress-computed-diff: true SourceShopifyCreateRequest: required: - name @@ -114579,6 +129473,32 @@ components: type: "string" x-speakeasy-entity: Source_SigmaComputing x-speakeasy-param-suppress-computed-diff: true + SourceSimfinCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-simfin" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Simfin + x-speakeasy-param-suppress-computed-diff: true SourceSimplecastCreateRequest: required: - name @@ -114709,6 +129629,32 @@ components: type: "string" x-speakeasy-entity: Source_Smartengage x-speakeasy-param-suppress-computed-diff: true + SourceSmartreachCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-smartreach" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Smartreach + x-speakeasy-param-suppress-computed-diff: true SourceSmartsheetsCreateRequest: required: - name @@ -114943,6 +129889,32 @@ components: type: "string" x-speakeasy-entity: Source_SplitIo x-speakeasy-param-suppress-computed-diff: true + SourceSpotlercrmCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-spotlercrm" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Spotlercrm + x-speakeasy-param-suppress-computed-diff: true SourceSquareCreateRequest: required: - name @@ -115047,6 +130019,32 @@ components: type: "string" x-speakeasy-entity: Source_Statuspage x-speakeasy-param-suppress-computed-diff: true + SourceStockdataCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-stockdata" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Stockdata + x-speakeasy-param-suppress-computed-diff: true SourceStravaCreateRequest: required: - name @@ -115177,6 +130175,58 @@ components: type: "string" x-speakeasy-entity: Source_Survicate x-speakeasy-param-suppress-computed-diff: true + SourceSystemeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-systeme" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Systeme + x-speakeasy-param-suppress-computed-diff: true + SourceTaboolaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-taboola" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Taboola + x-speakeasy-param-suppress-computed-diff: true SourceTeamtailorCreateRequest: required: - name @@ -115359,6 +130409,32 @@ components: type: "string" x-speakeasy-entity: Source_Ticketmaster x-speakeasy-param-suppress-computed-diff: true + SourceTickettailorCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-tickettailor" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Tickettailor + x-speakeasy-param-suppress-computed-diff: true SourceTiktokMarketingCreateRequest: required: - name @@ -115411,6 +130487,32 @@ components: type: "string" x-speakeasy-entity: Source_Timely x-speakeasy-param-suppress-computed-diff: true + SourceTinyemailCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-tinyemail" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Tinyemail + x-speakeasy-param-suppress-computed-diff: true SourceTodoistCreateRequest: required: - name @@ -115437,6 +130539,32 @@ components: type: "string" x-speakeasy-entity: Source_Todoist x-speakeasy-param-suppress-computed-diff: true + SourceTrackPmsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-track-pms" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_TrackPms + x-speakeasy-param-suppress-computed-diff: true SourceTrelloCreateRequest: required: - name @@ -115463,6 +130591,32 @@ components: type: "string" x-speakeasy-entity: Source_Trello x-speakeasy-param-suppress-computed-diff: true + SourceTremendousCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-tremendous" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Tremendous + x-speakeasy-param-suppress-computed-diff: true SourceTrustpilotCreateRequest: required: - name @@ -115645,6 +130799,32 @@ components: type: "string" x-speakeasy-entity: Source_Typeform x-speakeasy-param-suppress-computed-diff: true + SourceUbidotsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-ubidots" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Ubidots + x-speakeasy-param-suppress-computed-diff: true SourceUnleashCreateRequest: required: - name @@ -115801,6 +130981,32 @@ components: type: "string" x-speakeasy-entity: Source_Veeqo x-speakeasy-param-suppress-computed-diff: true + SourceVercelCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-vercel" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Vercel + x-speakeasy-param-suppress-computed-diff: true SourceVismaEconomicCreateRequest: required: - name @@ -115957,6 +131163,32 @@ components: type: "string" x-speakeasy-entity: Source_Weatherstack x-speakeasy-param-suppress-computed-diff: true + SourceWebScrapperCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-web-scrapper" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_WebScrapper + x-speakeasy-param-suppress-computed-diff: true SourceWebflowCreateRequest: required: - name @@ -116217,6 +131449,32 @@ components: type: "string" x-speakeasy-entity: Source_Wrike x-speakeasy-param-suppress-computed-diff: true + SourceWufooCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-wufoo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Wufoo + x-speakeasy-param-suppress-computed-diff: true SourceXkcdCreateRequest: required: - name @@ -116399,6 +131657,32 @@ components: type: "string" x-speakeasy-entity: Source_YoutubeAnalytics x-speakeasy-param-suppress-computed-diff: true + SourceYoutubeDataCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-youtube-data" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_YoutubeData + x-speakeasy-param-suppress-computed-diff: true SourceZapierSupportedStorageCreateRequest: required: - name @@ -116581,6 +131865,84 @@ components: type: "string" x-speakeasy-entity: Source_Zenloop x-speakeasy-param-suppress-computed-diff: true + SourceZohoAnalyticsMetadataApiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoho-analytics-metadata-api" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZohoAnalyticsMetadataApi + x-speakeasy-param-suppress-computed-diff: true + SourceZohoBiginCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoho-bigin" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZohoBigin + x-speakeasy-param-suppress-computed-diff: true + SourceZohoBillingCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoho-billing" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZohoBilling + x-speakeasy-param-suppress-computed-diff: true SourceZohoBooksCreateRequest: required: - name @@ -116607,6 +131969,32 @@ components: type: "string" x-speakeasy-entity: Source_ZohoBooks x-speakeasy-param-suppress-computed-diff: true + SourceZohoCampaignCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoho-campaign" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_ZohoCampaign + x-speakeasy-param-suppress-computed-diff: true SourceZohoCrmCreateRequest: required: - name @@ -116633,6 +132021,58 @@ components: type: "string" x-speakeasy-entity: Source_ZohoCrm x-speakeasy-param-suppress-computed-diff: true + SourceZohoDeskCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoho-desk" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZohoDesk + x-speakeasy-param-suppress-computed-diff: true + SourceZohoExpenseCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoho-expense" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZohoExpense + x-speakeasy-param-suppress-computed-diff: true SourceZohoInventoryCreateRequest: required: - name @@ -116659,6 +132099,58 @@ components: type: "string" x-speakeasy-entity: Source_ZohoInventory x-speakeasy-param-suppress-computed-diff: true + SourceZohoInvoiceCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoho-invoice" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZohoInvoice + x-speakeasy-param-suppress-computed-diff: true + SourceZonkaFeedbackCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zonka-feedback" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_ZonkaFeedback + x-speakeasy-param-suppress-computed-diff: true SourceZoomCreateRequest: required: - name @@ -117604,6 +133096,22 @@ components: $ref: "#/components/schemas/source-activecampaign-update" x-speakeasy-entity: Source_Activecampaign x-speakeasy-param-suppress-computed-diff: true + SourceAgilecrmPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-agilecrm-update" + x-speakeasy-entity: Source_Agilecrm + x-speakeasy-param-suppress-computed-diff: true SourceAhaPutRequest: required: - "name" @@ -117668,6 +133176,22 @@ components: $ref: "#/components/schemas/source-airtable-update" x-speakeasy-entity: Source_Airtable x-speakeasy-param-suppress-computed-diff: true + SourceAkeneoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-akeneo-update" + x-speakeasy-entity: Source_Akeneo + x-speakeasy-param-suppress-computed-diff: true SourceAlgoliaPutRequest: required: - "name" @@ -117828,6 +133352,22 @@ components: $ref: "#/components/schemas/source-apple-search-ads-update" x-speakeasy-entity: Source_AppleSearchAds x-speakeasy-param-suppress-computed-diff: true + SourceApptivoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-apptivo-update" + x-speakeasy-entity: Source_Apptivo + x-speakeasy-param-suppress-computed-diff: true SourceAsanaPutRequest: required: - "name" @@ -117972,6 +133512,22 @@ components: $ref: "#/components/schemas/source-beamer-update" x-speakeasy-entity: Source_Beamer x-speakeasy-param-suppress-computed-diff: true + SourceBigmailerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-bigmailer-update" + x-speakeasy-entity: Source_Bigmailer + x-speakeasy-param-suppress-computed-diff: true SourceBigqueryPutRequest: required: - "name" @@ -118020,6 +133576,38 @@ components: $ref: "#/components/schemas/source-bitly-update" x-speakeasy-entity: Source_Bitly x-speakeasy-param-suppress-computed-diff: true + SourceBloggerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-blogger-update" + x-speakeasy-entity: Source_Blogger + x-speakeasy-param-suppress-computed-diff: true + SourceBoxPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-box-update" + x-speakeasy-entity: Source_Box + x-speakeasy-param-suppress-computed-diff: true SourceBraintreePutRequest: required: - "name" @@ -118084,6 +133672,22 @@ components: $ref: "#/components/schemas/source-brevo-update" x-speakeasy-entity: Source_Brevo x-speakeasy-param-suppress-computed-diff: true + SourceBrexPutRequest: + required: + - 
"name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-brex-update" + x-speakeasy-entity: Source_Brex + x-speakeasy-param-suppress-computed-diff: true SourceBugsnagPutRequest: required: - "name" @@ -118116,6 +133720,22 @@ components: $ref: "#/components/schemas/source-buildkite-update" x-speakeasy-entity: Source_Buildkite x-speakeasy-param-suppress-computed-diff: true + SourceBunnyIncPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-bunny-inc-update" + x-speakeasy-entity: Source_BunnyInc + x-speakeasy-param-suppress-computed-diff: true SourceBuzzsproutPutRequest: required: - "name" @@ -118132,6 +133752,22 @@ components: $ref: "#/components/schemas/source-buzzsprout-update" x-speakeasy-entity: Source_Buzzsprout x-speakeasy-param-suppress-computed-diff: true + SourceCalComPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-cal-com-update" + x-speakeasy-entity: Source_CalCom + x-speakeasy-param-suppress-computed-diff: true SourceCalendlyPutRequest: required: - "name" @@ -118180,6 +133816,22 @@ components: $ref: "#/components/schemas/source-campaign-monitor-update" x-speakeasy-entity: Source_CampaignMonitor x-speakeasy-param-suppress-computed-diff: true + SourceCampaynPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-campayn-update" + x-speakeasy-entity: Source_Campayn + x-speakeasy-param-suppress-computed-diff: true SourceCannyPutRequest: required: - "name" @@ -118196,6 +133848,22 @@ components: $ref: "#/components/schemas/source-canny-update" x-speakeasy-entity: Source_Canny x-speakeasy-param-suppress-computed-diff: true + SourceCapsuleCrmPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-capsule-crm-update" + x-speakeasy-entity: Source_CapsuleCrm + x-speakeasy-param-suppress-computed-diff: true SourceCareQualityCommissionPutRequest: required: - "name" @@ -118340,6 +134008,22 @@ components: $ref: "#/components/schemas/source-cimis-update" x-speakeasy-entity: Source_Cimis x-speakeasy-param-suppress-computed-diff: true + SourceCin7PutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-cin7-update" + x-speakeasy-entity: Source_Cin7 + x-speakeasy-param-suppress-computed-diff: true SourceCircaPutRequest: required: - "name" @@ -118452,6 +134136,22 @@ components: $ref: "#/components/schemas/source-clockify-update" x-speakeasy-entity: Source_Clockify x-speakeasy-param-suppress-computed-diff: true + SourceClockodoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: 
"uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-clockodo-update" + x-speakeasy-entity: Source_Clockodo + x-speakeasy-param-suppress-computed-diff: true SourceCloseComPutRequest: required: - "name" @@ -118468,6 +134168,22 @@ components: $ref: "#/components/schemas/source-close-com-update" x-speakeasy-entity: Source_CloseCom x-speakeasy-param-suppress-computed-diff: true + SourceCloudbedsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-cloudbeds-update" + x-speakeasy-entity: Source_Cloudbeds + x-speakeasy-param-suppress-computed-diff: true SourceCoassemblePutRequest: required: - "name" @@ -118740,6 +134456,22 @@ components: $ref: "#/components/schemas/source-delighted-update" x-speakeasy-entity: Source_Delighted x-speakeasy-param-suppress-computed-diff: true + SourceDeputyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-deputy-update" + x-speakeasy-entity: Source_Deputy + x-speakeasy-param-suppress-computed-diff: true SourceDixaPutRequest: required: - "name" @@ -118852,6 +134584,22 @@ components: $ref: "#/components/schemas/source-dynamodb-update" x-speakeasy-entity: Source_Dynamodb x-speakeasy-param-suppress-computed-diff: true + SourceEConomicPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-e-conomic-update" + x-speakeasy-entity: Source_EConomic + x-speakeasy-param-suppress-computed-diff: true SourceEasypostPutRequest: required: - "name" @@ -118884,6 +134632,22 @@ components: $ref: "#/components/schemas/source-easypromos-update" x-speakeasy-entity: Source_Easypromos x-speakeasy-param-suppress-computed-diff: true + SourceElasticemailPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-elasticemail-update" + x-speakeasy-entity: Source_Elasticemail + x-speakeasy-param-suppress-computed-diff: true SourceEmailoctopusPutRequest: required: - "name" @@ -118916,6 +134680,22 @@ components: $ref: "#/components/schemas/source-employment-hero-update" x-speakeasy-entity: Source_EmploymentHero x-speakeasy-param-suppress-computed-diff: true + SourceEnchargePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-encharge-update" + x-speakeasy-entity: Source_Encharge + x-speakeasy-param-suppress-computed-diff: true SourceEventbritePutRequest: required: - "name" @@ -118932,6 +134712,38 @@ components: $ref: "#/components/schemas/source-eventbrite-update" x-speakeasy-entity: Source_Eventbrite x-speakeasy-param-suppress-computed-diff: true + SourceEventeePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: 
"#/components/schemas/source-eventee-update" + x-speakeasy-entity: Source_Eventee + x-speakeasy-param-suppress-computed-diff: true + SourceEventzillaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-eventzilla-update" + x-speakeasy-entity: Source_Eventzilla + x-speakeasy-param-suppress-computed-diff: true SourceExchangeRatesPutRequest: required: - "name" @@ -119044,6 +134856,86 @@ components: $ref: "#/components/schemas/source-file-update" x-speakeasy-entity: Source_File x-speakeasy-param-suppress-computed-diff: true + SourceFilloutPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-fillout-update" + x-speakeasy-entity: Source_Fillout + x-speakeasy-param-suppress-computed-diff: true + SourceFinagePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-finage-update" + x-speakeasy-entity: Source_Finage + x-speakeasy-param-suppress-computed-diff: true + SourceFinancialModellingPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-financial-modelling-update" + x-speakeasy-entity: Source_FinancialModelling + x-speakeasy-param-suppress-computed-diff: true + SourceFinnhubPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-finnhub-update" + x-speakeasy-entity: Source_Finnhub + x-speakeasy-param-suppress-computed-diff: true + SourceFinnworldsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-finnworlds-update" + x-speakeasy-entity: Source_Finnworlds + x-speakeasy-param-suppress-computed-diff: true SourceFireboltPutRequest: required: - "name" @@ -119060,6 +134952,22 @@ components: $ref: "#/components/schemas/source-firebolt-update" x-speakeasy-entity: Source_Firebolt x-speakeasy-param-suppress-computed-diff: true + SourceFirehydrantPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-firehydrant-update" + x-speakeasy-entity: Source_Firehydrant + x-speakeasy-param-suppress-computed-diff: true SourceFleetioPutRequest: required: - "name" @@ -119076,6 +134984,22 @@ components: $ref: "#/components/schemas/source-fleetio-update" x-speakeasy-entity: Source_Fleetio x-speakeasy-param-suppress-computed-diff: true + SourceFlexmailPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-flexmail-update" + 
x-speakeasy-entity: Source_Flexmail + x-speakeasy-param-suppress-computed-diff: true SourceFlexportPutRequest: required: - "name" @@ -119108,6 +135032,38 @@ components: $ref: "#/components/schemas/source-float-update" x-speakeasy-entity: Source_Float x-speakeasy-param-suppress-computed-diff: true + SourceFlowluPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-flowlu-update" + x-speakeasy-entity: Source_Flowlu + x-speakeasy-param-suppress-computed-diff: true + SourceFormbricksPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-formbricks-update" + x-speakeasy-entity: Source_Formbricks + x-speakeasy-param-suppress-computed-diff: true SourceFreeAgentConnectorPutRequest: required: - "name" @@ -119124,6 +135080,38 @@ components: $ref: "#/components/schemas/source-free-agent-connector-update" x-speakeasy-entity: Source_FreeAgentConnector x-speakeasy-param-suppress-computed-diff: true + SourceFreightviewPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-freightview-update" + x-speakeasy-entity: Source_Freightview + x-speakeasy-param-suppress-computed-diff: true + SourceFreshbooksPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-freshbooks-update" + x-speakeasy-entity: Source_Freshbooks + x-speakeasy-param-suppress-computed-diff: true SourceFreshcallerPutRequest: required: - "name" @@ -119220,6 +135208,22 @@ components: $ref: "#/components/schemas/source-front-update" x-speakeasy-entity: Source_Front x-speakeasy-param-suppress-computed-diff: true + SourceFulcrumPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-fulcrum-update" + x-speakeasy-entity: Source_Fulcrum + x-speakeasy-param-suppress-computed-diff: true SourceGainsightPxPutRequest: required: - "name" @@ -119252,6 +135256,22 @@ components: $ref: "#/components/schemas/source-gcs-update" x-speakeasy-entity: Source_Gcs x-speakeasy-param-suppress-computed-diff: true + SourceGetgistPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-getgist-update" + x-speakeasy-entity: Source_Getgist + x-speakeasy-param-suppress-computed-diff: true SourceGetlagoPutRequest: required: - "name" @@ -119268,6 +135288,22 @@ components: $ref: "#/components/schemas/source-getlago-update" x-speakeasy-entity: Source_Getlago x-speakeasy-param-suppress-computed-diff: true + SourceGitbookPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: 
"#/components/schemas/source-gitbook-update" + x-speakeasy-entity: Source_Gitbook + x-speakeasy-param-suppress-computed-diff: true SourceGithubPutRequest: required: - "name" @@ -119444,6 +135480,22 @@ components: $ref: "#/components/schemas/source-google-calendar-update" x-speakeasy-entity: Source_GoogleCalendar x-speakeasy-param-suppress-computed-diff: true + SourceGoogleClassroomPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-classroom-update" + x-speakeasy-entity: Source_GoogleClassroom + x-speakeasy-param-suppress-computed-diff: true SourceGoogleDirectoryPutRequest: required: - "name" @@ -119476,6 +135528,22 @@ components: $ref: "#/components/schemas/source-google-drive-update" x-speakeasy-entity: Source_GoogleDrive x-speakeasy-param-suppress-computed-diff: true + SourceGoogleFormsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-forms-update" + x-speakeasy-entity: Source_GoogleForms + x-speakeasy-param-suppress-computed-diff: true SourceGooglePagespeedInsightsPutRequest: required: - "name" @@ -119748,6 +135816,22 @@ components: $ref: "#/components/schemas/source-hubspot-update" x-speakeasy-entity: Source_Hubspot x-speakeasy-param-suppress-computed-diff: true + SourceHumanitixPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-humanitix-update" + x-speakeasy-entity: Source_Humanitix + x-speakeasy-param-suppress-computed-diff: true SourceIlluminaBasespacePutRequest: required: - "name" @@ -119780,6 +135864,22 @@ components: $ref: "#/components/schemas/source-incident-io-update" x-speakeasy-entity: Source_IncidentIo x-speakeasy-param-suppress-computed-diff: true + SourceInflowinventoryPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-inflowinventory-update" + x-speakeasy-entity: Source_Inflowinventory + x-speakeasy-param-suppress-computed-diff: true SourceInsightlyPutRequest: required: - "name" @@ -119860,6 +135960,22 @@ components: $ref: "#/components/schemas/source-invoiced-update" x-speakeasy-entity: Source_Invoiced x-speakeasy-param-suppress-computed-diff: true + SourceInvoiceninjaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-invoiceninja-update" + x-speakeasy-entity: Source_Invoiceninja + x-speakeasy-param-suppress-computed-diff: true SourceIp2whoisPutRequest: required: - "name" @@ -119908,6 +136024,22 @@ components: $ref: "#/components/schemas/source-jira-update" x-speakeasy-entity: Source_Jira x-speakeasy-param-suppress-computed-diff: true + SourceJobnimbusPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: 
"#/components/schemas/source-jobnimbus-update" + x-speakeasy-entity: Source_Jobnimbus + x-speakeasy-param-suppress-computed-diff: true SourceJotformPutRequest: required: - "name" @@ -120132,6 +136264,22 @@ components: $ref: "#/components/schemas/source-lemlist-update" x-speakeasy-entity: Source_Lemlist x-speakeasy-param-suppress-computed-diff: true + SourceLessAnnoyingCrmPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-less-annoying-crm-update" + x-speakeasy-entity: Source_LessAnnoyingCrm + x-speakeasy-param-suppress-computed-diff: true SourceLeverHiringPutRequest: required: - "name" @@ -120148,6 +136296,22 @@ components: $ref: "#/components/schemas/source-lever-hiring-update" x-speakeasy-entity: Source_LeverHiring x-speakeasy-param-suppress-computed-diff: true + SourceLightspeedRetailPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-lightspeed-retail-update" + x-speakeasy-entity: Source_LightspeedRetail + x-speakeasy-param-suppress-computed-diff: true SourceLinkedinAdsPutRequest: required: - "name" @@ -120340,6 +136504,38 @@ components: $ref: "#/components/schemas/source-mailjet-sms-update" x-speakeasy-entity: Source_MailjetSms x-speakeasy-param-suppress-computed-diff: true + SourceMailosaurPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mailosaur-update" + x-speakeasy-entity: Source_Mailosaur + x-speakeasy-param-suppress-computed-diff: true + SourceMailtrapPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mailtrap-update" + x-speakeasy-entity: Source_Mailtrap + x-speakeasy-param-suppress-computed-diff: true SourceMarketoPutRequest: required: - "name" @@ -120356,6 +136552,22 @@ components: $ref: "#/components/schemas/source-marketo-update" x-speakeasy-entity: Source_Marketo x-speakeasy-param-suppress-computed-diff: true + SourceMarketstackPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-marketstack-update" + x-speakeasy-entity: Source_Marketstack + x-speakeasy-param-suppress-computed-diff: true SourceMentionPutRequest: required: - "name" @@ -120724,6 +136936,38 @@ components: $ref: "#/components/schemas/source-news-api-update" x-speakeasy-entity: Source_NewsApi x-speakeasy-param-suppress-computed-diff: true + SourceNewsdataIoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-newsdata-io-update" + x-speakeasy-entity: Source_NewsdataIo + x-speakeasy-param-suppress-computed-diff: true + SourceNocrmPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: 
"string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-nocrm-update" + x-speakeasy-entity: Source_Nocrm + x-speakeasy-param-suppress-computed-diff: true SourceNorthpassLmsPutRequest: required: - "name" @@ -120756,6 +137000,22 @@ components: $ref: "#/components/schemas/source-notion-update" x-speakeasy-entity: Source_Notion x-speakeasy-param-suppress-computed-diff: true + SourceNutshellPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-nutshell-update" + x-speakeasy-entity: Source_Nutshell + x-speakeasy-param-suppress-computed-diff: true SourceNylasPutRequest: required: - "name" @@ -120820,6 +137080,38 @@ components: $ref: "#/components/schemas/source-omnisend-update" x-speakeasy-entity: Source_Omnisend x-speakeasy-param-suppress-computed-diff: true + SourceOncehubPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-oncehub-update" + x-speakeasy-entity: Source_Oncehub + x-speakeasy-param-suppress-computed-diff: true + SourceOnepagecrmPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-onepagecrm-update" + x-speakeasy-entity: Source_Onepagecrm + x-speakeasy-param-suppress-computed-diff: true SourceOnesignalPutRequest: required: - "name" @@ -120836,6 +137128,22 @@ components: $ref: "#/components/schemas/source-onesignal-update" x-speakeasy-entity: Source_Onesignal x-speakeasy-param-suppress-computed-diff: true + SourceOnfleetPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-onfleet-update" + x-speakeasy-entity: Source_Onfleet + x-speakeasy-param-suppress-computed-diff: true SourceOpenDataDcPutRequest: required: - "name" @@ -120852,6 +137160,38 @@ components: $ref: "#/components/schemas/source-open-data-dc-update" x-speakeasy-entity: Source_OpenDataDc x-speakeasy-param-suppress-computed-diff: true + SourceOpenaqPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-openaq-update" + x-speakeasy-entity: Source_Openaq + x-speakeasy-param-suppress-computed-diff: true + SourceOpenfdaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-openfda-update" + x-speakeasy-entity: Source_Openfda + x-speakeasy-param-suppress-computed-diff: true SourceOpenweatherPutRequest: required: - "name" @@ -120868,6 +137208,22 @@ components: $ref: "#/components/schemas/source-openweather-update" x-speakeasy-entity: Source_Openweather x-speakeasy-param-suppress-computed-diff: true + SourceOpinionStagePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + 
name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-opinion-stage-update" + x-speakeasy-entity: Source_OpinionStage + x-speakeasy-param-suppress-computed-diff: true SourceOpsgeniePutRequest: required: - "name" @@ -120980,6 +137336,38 @@ components: $ref: "#/components/schemas/source-outreach-update" x-speakeasy-entity: Source_Outreach x-speakeasy-param-suppress-computed-diff: true + SourceOveitPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-oveit-update" + x-speakeasy-entity: Source_Oveit + x-speakeasy-param-suppress-computed-diff: true + SourcePabblySubscriptionsBillingPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pabbly-subscriptions-billing-update" + x-speakeasy-entity: Source_PabblySubscriptionsBilling + x-speakeasy-param-suppress-computed-diff: true SourcePandadocPutRequest: required: - "name" @@ -120996,6 +137384,38 @@ components: $ref: "#/components/schemas/source-pandadoc-update" x-speakeasy-entity: Source_Pandadoc x-speakeasy-param-suppress-computed-diff: true + SourcePaperformPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-paperform-update" + x-speakeasy-entity: Source_Paperform + x-speakeasy-param-suppress-computed-diff: true + SourcePapersignPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-papersign-update" + x-speakeasy-entity: Source_Papersign + x-speakeasy-param-suppress-computed-diff: true SourcePardotPutRequest: required: - "name" @@ -121172,6 +137592,22 @@ components: $ref: "#/components/schemas/source-pipedrive-update" x-speakeasy-entity: Source_Pipedrive x-speakeasy-param-suppress-computed-diff: true + SourcePipelinerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pipeliner-update" + x-speakeasy-entity: Source_Pipeliner + x-speakeasy-param-suppress-computed-diff: true SourcePivotalTrackerPutRequest: required: - "name" @@ -121364,6 +137800,22 @@ components: $ref: "#/components/schemas/source-prestashop-update" x-speakeasy-entity: Source_Prestashop x-speakeasy-param-suppress-computed-diff: true + SourcePretixPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pretix-update" + x-speakeasy-entity: Source_Pretix + x-speakeasy-param-suppress-computed-diff: true SourcePrimetricPutRequest: required: - "name" @@ -121620,6 +138072,22 @@ components: $ref: "#/components/schemas/source-rentcast-update" x-speakeasy-entity: Source_Rentcast x-speakeasy-param-suppress-computed-diff: true + SourceRepairshoprPutRequest: + required: + - 
"name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-repairshopr-update" + x-speakeasy-entity: Source_Repairshopr + x-speakeasy-param-suppress-computed-diff: true SourceReplyIoPutRequest: required: - "name" @@ -121668,6 +138136,22 @@ components: $ref: "#/components/schemas/source-revenuecat-update" x-speakeasy-entity: Source_Revenuecat x-speakeasy-param-suppress-computed-diff: true + SourceRevolutMerchantPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-revolut-merchant-update" + x-speakeasy-entity: Source_RevolutMerchant + x-speakeasy-param-suppress-computed-diff: true SourceRkiCovidPutRequest: required: - "name" @@ -121684,6 +138168,22 @@ components: $ref: "#/components/schemas/source-rki-covid-update" x-speakeasy-entity: Source_RkiCovid x-speakeasy-param-suppress-computed-diff: true + SourceRocketlanePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-rocketlane-update" + x-speakeasy-entity: Source_Rocketlane + x-speakeasy-param-suppress-computed-diff: true SourceRollbarPutRequest: required: - "name" @@ -121732,6 +138232,22 @@ components: $ref: "#/components/schemas/source-rss-update" x-speakeasy-entity: Source_Rss x-speakeasy-param-suppress-computed-diff: true + SourceRuddrPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-ruddr-update" + x-speakeasy-entity: Source_Ruddr + x-speakeasy-param-suppress-computed-diff: true SourceS3PutRequest: required: - "name" @@ -121780,6 +138296,22 @@ components: $ref: "#/components/schemas/source-sage-hr-update" x-speakeasy-entity: Source_SageHr x-speakeasy-param-suppress-computed-diff: true + SourceSalesflarePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-salesflare-update" + x-speakeasy-entity: Source_Salesflare + x-speakeasy-param-suppress-computed-diff: true SourceSalesforcePutRequest: required: - "name" @@ -121924,6 +138456,38 @@ components: $ref: "#/components/schemas/source-sendinblue-update" x-speakeasy-entity: Source_Sendinblue x-speakeasy-param-suppress-computed-diff: true + SourceSendowlPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sendowl-update" + x-speakeasy-entity: Source_Sendowl + x-speakeasy-param-suppress-computed-diff: true + SourceSendpulsePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sendpulse-update" + x-speakeasy-entity: Source_Sendpulse + x-speakeasy-param-suppress-computed-diff: true 
SourceSenseforcePutRequest: required: - "name" @@ -122004,6 +138568,22 @@ components: $ref: "#/components/schemas/source-sharetribe-update" x-speakeasy-entity: Source_Sharetribe x-speakeasy-param-suppress-computed-diff: true + SourceShippoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-shippo-update" + x-speakeasy-entity: Source_Shippo + x-speakeasy-param-suppress-computed-diff: true SourceShopifyPutRequest: required: - "name" @@ -122068,6 +138648,22 @@ components: $ref: "#/components/schemas/source-sigma-computing-update" x-speakeasy-entity: Source_SigmaComputing x-speakeasy-param-suppress-computed-diff: true + SourceSimfinPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-simfin-update" + x-speakeasy-entity: Source_Simfin + x-speakeasy-param-suppress-computed-diff: true SourceSimplecastPutRequest: required: - "name" @@ -122148,6 +138744,22 @@ components: $ref: "#/components/schemas/source-smartengage-update" x-speakeasy-entity: Source_Smartengage x-speakeasy-param-suppress-computed-diff: true + SourceSmartreachPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-smartreach-update" + x-speakeasy-entity: Source_Smartreach + x-speakeasy-param-suppress-computed-diff: true SourceSmartsheetsPutRequest: required: - "name" @@ -122292,6 +138904,22 @@ components: $ref: "#/components/schemas/source-split-io-update" x-speakeasy-entity: Source_SplitIo x-speakeasy-param-suppress-computed-diff: true + SourceSpotlercrmPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-spotlercrm-update" + x-speakeasy-entity: Source_Spotlercrm + x-speakeasy-param-suppress-computed-diff: true SourceSquarePutRequest: required: - "name" @@ -122356,6 +138984,22 @@ components: $ref: "#/components/schemas/source-statuspage-update" x-speakeasy-entity: Source_Statuspage x-speakeasy-param-suppress-computed-diff: true + SourceStockdataPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-stockdata-update" + x-speakeasy-entity: Source_Stockdata + x-speakeasy-param-suppress-computed-diff: true SourceStravaPutRequest: required: - "name" @@ -122436,6 +139080,38 @@ components: $ref: "#/components/schemas/source-survicate-update" x-speakeasy-entity: Source_Survicate x-speakeasy-param-suppress-computed-diff: true + SourceSystemePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-systeme-update" + x-speakeasy-entity: Source_Systeme + x-speakeasy-param-suppress-computed-diff: true + SourceTaboolaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: 
"object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-taboola-update" + x-speakeasy-entity: Source_Taboola + x-speakeasy-param-suppress-computed-diff: true SourceTeamtailorPutRequest: required: - "name" @@ -122548,6 +139224,22 @@ components: $ref: "#/components/schemas/source-ticketmaster-update" x-speakeasy-entity: Source_Ticketmaster x-speakeasy-param-suppress-computed-diff: true + SourceTickettailorPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-tickettailor-update" + x-speakeasy-entity: Source_Tickettailor + x-speakeasy-param-suppress-computed-diff: true SourceTiktokMarketingPutRequest: required: - "name" @@ -122580,6 +139272,22 @@ components: $ref: "#/components/schemas/source-timely-update" x-speakeasy-entity: Source_Timely x-speakeasy-param-suppress-computed-diff: true + SourceTinyemailPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-tinyemail-update" + x-speakeasy-entity: Source_Tinyemail + x-speakeasy-param-suppress-computed-diff: true SourceTodoistPutRequest: required: - "name" @@ -122596,6 +139304,22 @@ components: $ref: "#/components/schemas/source-todoist-update" x-speakeasy-entity: Source_Todoist x-speakeasy-param-suppress-computed-diff: true + SourceTrackPmsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-track-pms-update" + x-speakeasy-entity: Source_TrackPms + x-speakeasy-param-suppress-computed-diff: true SourceTrelloPutRequest: required: - "name" @@ -122612,6 +139336,22 @@ components: $ref: "#/components/schemas/source-trello-update" x-speakeasy-entity: Source_Trello x-speakeasy-param-suppress-computed-diff: true + SourceTremendousPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-tremendous-update" + x-speakeasy-entity: Source_Tremendous + x-speakeasy-param-suppress-computed-diff: true SourceTrustpilotPutRequest: required: - "name" @@ -122724,6 +139464,22 @@ components: $ref: "#/components/schemas/source-typeform-update" x-speakeasy-entity: Source_Typeform x-speakeasy-param-suppress-computed-diff: true + SourceUbidotsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-ubidots-update" + x-speakeasy-entity: Source_Ubidots + x-speakeasy-param-suppress-computed-diff: true SourceUnleashPutRequest: required: - "name" @@ -122820,6 +139576,22 @@ components: $ref: "#/components/schemas/source-veeqo-update" x-speakeasy-entity: Source_Veeqo x-speakeasy-param-suppress-computed-diff: true + SourceVercelPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: 
+ $ref: "#/components/schemas/source-vercel-update" + x-speakeasy-entity: Source_Vercel + x-speakeasy-param-suppress-computed-diff: true SourceVismaEconomicPutRequest: required: - "name" @@ -122916,6 +139688,22 @@ components: $ref: "#/components/schemas/source-weatherstack-update" x-speakeasy-entity: Source_Weatherstack x-speakeasy-param-suppress-computed-diff: true + SourceWebScrapperPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-web-scrapper-update" + x-speakeasy-entity: Source_WebScrapper + x-speakeasy-param-suppress-computed-diff: true SourceWebflowPutRequest: required: - "name" @@ -123076,6 +139864,22 @@ components: $ref: "#/components/schemas/source-wrike-update" x-speakeasy-entity: Source_Wrike x-speakeasy-param-suppress-computed-diff: true + SourceWufooPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-wufoo-update" + x-speakeasy-entity: Source_Wufoo + x-speakeasy-param-suppress-computed-diff: true SourceXkcdPutRequest: required: - "name" @@ -123188,6 +139992,22 @@ components: $ref: "#/components/schemas/source-youtube-analytics-update" x-speakeasy-entity: Source_YoutubeAnalytics x-speakeasy-param-suppress-computed-diff: true + SourceYoutubeDataPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-youtube-data-update" + x-speakeasy-entity: Source_YoutubeData + x-speakeasy-param-suppress-computed-diff: true SourceZapierSupportedStoragePutRequest: required: - "name" @@ -123300,6 +140120,54 @@ components: $ref: "#/components/schemas/source-zenloop-update" x-speakeasy-entity: Source_Zenloop x-speakeasy-param-suppress-computed-diff: true + SourceZohoAnalyticsMetadataApiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoho-analytics-metadata-api-update" + x-speakeasy-entity: Source_ZohoAnalyticsMetadataApi + x-speakeasy-param-suppress-computed-diff: true + SourceZohoBiginPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoho-bigin-update" + x-speakeasy-entity: Source_ZohoBigin + x-speakeasy-param-suppress-computed-diff: true + SourceZohoBillingPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoho-billing-update" + x-speakeasy-entity: Source_ZohoBilling + x-speakeasy-param-suppress-computed-diff: true SourceZohoBooksPutRequest: required: - "name" @@ -123316,6 +140184,22 @@ components: $ref: "#/components/schemas/source-zoho-books-update" x-speakeasy-entity: Source_ZohoBooks x-speakeasy-param-suppress-computed-diff: true + SourceZohoCampaignPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + 
type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoho-campaign-update" + x-speakeasy-entity: Source_ZohoCampaign + x-speakeasy-param-suppress-computed-diff: true SourceZohoCrmPutRequest: required: - "name" @@ -123332,6 +140216,38 @@ components: $ref: "#/components/schemas/source-zoho-crm-update" x-speakeasy-entity: Source_ZohoCrm x-speakeasy-param-suppress-computed-diff: true + SourceZohoDeskPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoho-desk-update" + x-speakeasy-entity: Source_ZohoDesk + x-speakeasy-param-suppress-computed-diff: true + SourceZohoExpensePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoho-expense-update" + x-speakeasy-entity: Source_ZohoExpense + x-speakeasy-param-suppress-computed-diff: true SourceZohoInventoryPutRequest: required: - "name" @@ -123348,6 +140264,38 @@ components: $ref: "#/components/schemas/source-zoho-inventory-update" x-speakeasy-entity: Source_ZohoInventory x-speakeasy-param-suppress-computed-diff: true + SourceZohoInvoicePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoho-invoice-update" + x-speakeasy-entity: Source_ZohoInvoice + x-speakeasy-param-suppress-computed-diff: true + SourceZonkaFeedbackPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zonka-feedback-update" + x-speakeasy-entity: Source_ZonkaFeedback + x-speakeasy-param-suppress-computed-diff: true SourceZoomPutRequest: required: - "name" diff --git a/airbyte-api/server-api/src/main/openapi/config.yaml b/airbyte-api/server-api/src/main/openapi/config.yaml index 132150fbbed..f6ff3e9b1e2 100644 --- a/airbyte-api/server-api/src/main/openapi/config.yaml +++ b/airbyte-api/server-api/src/main/openapi/config.yaml @@ -3051,6 +3051,29 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/connector_builder_projects/get_oauth_consent_url: + post: + tags: + - connector_builder_project + summary: Given a connector builder project ID, return the URL to the consent screen where to redirect the user to. 
+ operationId: getConnectorBuilderProjectOAuthConsent + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/BuilderProjectOauthConsentRequest" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/OAuthConsentRead" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/source_oauths/complete_oauth: post: tags: @@ -3074,6 +3097,29 @@ paths: $ref: "#/components/responses/NotFoundResponse" "422": $ref: "#/components/responses/InvalidInputResponse" + /v1/connector_builder_projects/complete_oauth: + post: + tags: + - connector_builder_project + summary: Given a builder project Id generate an access/refresh token etc. + operationId: completeConnectorBuilderProjectOauth + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/CompleteConnectorBuilderProjectOauthRequest" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/CompleteOAuthResponse" + "404": + $ref: "#/components/responses/NotFoundResponse" + "422": + $ref: "#/components/responses/InvalidInputResponse" /v1/source_oauths/revoke: post: tags: @@ -3175,6 +3221,25 @@ paths: application/json: schema: $ref: "#/components/schemas/WebBackendCheckUpdatesRead" + /v1/web_backend/mappers/validate: + post: + tags: + - web_backend + summary: Validates a draft set of mappers against a connection's configured streams + operationId: webBackendValidateMappers + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WebBackendValidateMappersRequestBody" + required: true + responses: + "200": + description: Successful operation + content: + application/json: + schema: + $ref: "#/components/schemas/WebBackendValidateMappersResponse" /v1/web_backend/connections/list: post: tags: @@ -4658,13 +4723,13 @@ paths: application/json: schema: $ref: "#/components/schemas/PaymentInformationRead" - /v1/billing/organization_balance: + /v1/billing/subscription_info: post: - summary: Get the current balance of an organization + summary: Get the current information about the organization's subscription tags: - billing - cloud-only - operationId: getOrganizationBalance + operationId: getSubscriptionInfo requestBody: content: application/json: @@ -4672,11 +4737,11 @@ paths: $ref: "#/components/schemas/OrganizationIdRequestBody" responses: "200": - description: Successfully got organization's balance + description: Successfully got organization subscription info content: application/json: schema: - $ref: "#/components/schemas/OrganizationBalanceRead" + $ref: "#/components/schemas/OrganizationSubscriptionInfoRead" /v1/billing/trial_status: post: summary: Get the current trial status of an organization @@ -5574,6 +5639,29 @@ paths: description: Successful operation "422": $ref: "#/components/responses/InvalidInputResponse" + + /v1/dataplanes/get_dataplane_id: + post: + summary: Get a dataplane ID given the provided request body params + tags: + - dataplane + operationId: getDataplaneId + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DataplaneGetIdRequestBody" + required: true + responses: + "200": + description: Successfully retrieved dataplane ID + content: + application/json: + schema: + $ref: "#/components/schemas/DataplaneRead" + "422": + $ref: "#/components/responses/InvalidInputResponse" + # 
Airbyte API /public: get: @@ -7313,8 +7401,6 @@ components: required: - value - config_key - - resource_id - - resource_type - scope_id - scope_type - origin @@ -7355,8 +7441,6 @@ components: type: object required: - config_key - - resource_id - - resource_type - scope_id - scope_type properties: @@ -7509,6 +7593,12 @@ components: type: string rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" + actor_selection_info: + $ref: "#/components/schemas/ConnectorRolloutActorSelectionInfo" + actor_syncs: + type: array + items: + $ref: "#/components/schemas/ConnectorRolloutActorSyncInfo" ConnectorRolloutState: type: string @@ -7644,6 +7734,9 @@ components: updated_by: type: string format: uuid + migrate_pins: + type: boolean + default: true ConnectorRolloutStartResponse: type: object @@ -7727,6 +7820,9 @@ components: updated_by: type: string format: uuid + retain_pins_on_cancellation: + type: boolean + default: true ConnectorRolloutGetActorSyncInfoRequestBody: type: object @@ -7821,6 +7917,19 @@ components: format: uuid rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" + initial_rollout_pct: + type: integer + minimum: 0 + maximum: 100 + format: int32 + final_target_rollout_pct: + type: integer + minimum: 0 + maximum: 100 + format: int32 + migrate_pins: + type: boolean + default: true ConnectorRolloutManualRolloutRequestBody: type: object @@ -7853,6 +7962,9 @@ components: format: int32 minimum: 1 maximum: 100 + migrate_pins: + type: boolean + default: true ConnectorRolloutManualFinalizeRequestBody: type: object @@ -7883,6 +7995,9 @@ components: type: string failed_reason: type: string + retain_pins_on_cancellation: + type: boolean + default: true # WORKSPACE WorkspaceId: @@ -8173,13 +8288,27 @@ components: $ref: "#/components/schemas/OrganizationId" tombstone: type: boolean + workspaceLimits: + $ref: "#/components/schemas/WorkspaceLimits" + WorkspaceLimits: + type: object + required: + - sources + - destinations + - activeConnections + properties: + sources: + $ref: "#/components/schemas/Limit" + destinations: + $ref: "#/components/schemas/Limit" + activeConnections: + $ref: "#/components/schemas/Limit" WorkspaceOrganizationInfoRead: type: object description: Limited info about a workspace's organization that is safe to expose to workspace readers who are not members of the org. 
required: - organizationId - organizationName - - pba - sso properties: organizationId: @@ -8187,8 +8316,6 @@ components: format: uuid organizationName: type: string - pba: - type: boolean sso: type: boolean billing: @@ -8196,6 +8323,12 @@ components: required: - paymentStatus properties: + subscriptionStatus: + type: string + enum: + - pre_subscription + - unsubscribed + - subscribed paymentStatus: type: string enum: @@ -9170,6 +9303,8 @@ components: $ref: "#/components/schemas/SourceDefinitionSpecification" advancedAuth: $ref: "#/components/schemas/AdvancedAuth" + advancedAuthCredentialsAvailable: + type: boolean jobInfo: $ref: "#/components/schemas/SynchronousJobRead" # SOURCE @@ -9613,6 +9748,8 @@ components: $ref: "#/components/schemas/DestinationDefinitionSpecification" advancedAuth: $ref: "#/components/schemas/AdvancedAuth" + advancedAuthCredentialsAvailable: + type: boolean jobInfo: $ref: "#/components/schemas/SynchronousJobRead" supportedDestinationSyncModes: @@ -11027,6 +11164,7 @@ components: enum: - portal - payment_method + - setup default: portal CustomerPortalRead: type: object @@ -11132,22 +11270,22 @@ components: $ref: "#/components/schemas/AddressRead" defaultPaymentMethod: $ref: "#/components/schemas/PaymentMethodRead" - OrganizationBalanceRead: + OrganizationSubscriptionInfoRead: type: object required: - - hidden - - planType + - name + - selfServeSubscription + - balanceHidden properties: - hidden: + name: + type: string + selfServeSubscription: + type: boolean + cancellationDate: + $ref: "#/components/schemas/ISO8601DateTime" + balanceHidden: description: Whether the organization balance is hidden and should not be shown to the user. type: boolean - planType: - description: Whether the customer is on a prepaid or in_arrears type plan. This is only used during the migration and will be removed after the migration again. - type: string - enum: - - in_arrears - - prepaid - deprecated: true upcomingInvoice: type: object description: Describe the current state of the upcoming invoice. If `hidden` is `true`, this will be `null`, since no balance should be shown. @@ -11198,12 +11336,10 @@ components: - pre_trial - in_trial - post_trial - - unknown x-enum-descriptions: - The trial has not yet started for this organization - The organization is currently in the trial. 
End of the trial is indicated by the `trialEndsAt` property - The organization has left the trial already - - The trial status is unknown trialEndsAt: $ref: "#/components/schemas/ISO8601DateTime" @@ -11261,10 +11397,6 @@ components: format: uuid organizationName: type: string - pba: - type: boolean - orgLevelBilling: - type: boolean email: type: string OrganizationCreateRequestBody: @@ -11280,18 +11412,12 @@ components: type: string email: type: string - pba: - type: boolean - orgLevelBilling: - type: boolean OrganizationRead: type: object required: - organizationId - organizationName - email - - pba - - orgLevelBilling properties: organizationId: type: string @@ -11300,12 +11426,29 @@ components: type: string email: type: string - pba: - type: boolean - orgLevelBilling: - type: boolean ssoRealm: type: string + organizationLimits: + $ref: "#/components/schemas/OrganizationLimits" + OrganizationLimits: + type: object + properties: + users: + $ref: "#/components/schemas/Limit" + workspaces: + $ref: "#/components/schemas/Limit" + Limit: + type: object + required: + - current + - max + properties: + current: + type: integer + format: int64 + max: + type: integer + format: int64 OrganizationReadList: type: object required: @@ -11830,6 +11973,7 @@ components: - hashing - field-renaming - row-filtering + - encryption x-sdk-component: true ConfiguredStreamMapper: type: object @@ -11837,6 +11981,9 @@ components: - type - mapperConfiguration properties: + id: + type: string + format: uuid type: $ref: "#/components/schemas/StreamMapperType" mapperConfiguration: @@ -12448,7 +12595,7 @@ components: $ref: "#/components/schemas/LogSource" caller: $ref: "#/components/schemas/LogCaller" - strackTrace: + stackTrace: type: string LogRead: type: object @@ -13059,6 +13206,22 @@ components: $ref: "#/components/schemas/OAuthInputConfiguration" sourceId: $ref: "#/components/schemas/SourceId" + BuilderProjectOauthConsentRequest: + type: object + required: + - builderProjectId + - workspaceId + - redirectUrl + properties: + builderProjectId: + $ref: "#/components/schemas/ConnectorBuilderProjectId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + redirectUrl: + description: The url to redirect to after getting the user consent + type: string + oAuthInputConfiguration: + $ref: "#/components/schemas/OAuthInputConfiguration" DestinationOauthConsentRequest: type: object required: @@ -13084,6 +13247,23 @@ components: properties: consentUrl: type: string + CompleteConnectorBuilderProjectOauthRequest: + type: object + required: + - builderProjectId + - workspaceId + properties: + builderProjectId: + $ref: "#/components/schemas/ConnectorBuilderProjectId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + queryParams: + description: The query parameters present in the redirect URL after a user granted consent e.g auth code + type: object + additionalProperties: true # Oauth parameters like code, state, etc.. will be different per API so we don't specify them in advance + redirectUrl: + description: When completing OAuth flow to gain an access token, some API sometimes requires to verify that the app re-send the redirectUrl that was used when consent was given. 
+ type: string CompleteSourceOauthRequest: type: object required: @@ -13213,6 +13393,91 @@ components: type: integer sourceDefinitions: type: integer + WebBackendValidateMappersRequestBody: + type: object + required: + - connectionId + - mappers + - streamDescriptor + properties: + connectionId: + $ref: "#/components/schemas/ConnectionId" + streamDescriptor: + $ref: "#/components/schemas/StreamDescriptor" + mappers: + description: Mappers that should be applied to the stream before writing to the destination. + type: array + items: + $ref: "#/components/schemas/ConfiguredStreamMapper" + WebBackendValidateMappersResponse: + type: object + required: + - initialFields + - mappers + properties: + initialFields: + type: array + items: + $ref: "#/components/schemas/FieldSpec" + mappers: + type: array + items: + $ref: "#/components/schemas/MapperValidationResult" + FieldSpec: + type: object + required: + - name + - type + properties: + name: + type: string + type: + type: string + enum: + - STRING + - BOOLEAN + - DATE + - TIMESTAMP_WITHOUT_TIMEZONE + - TIMESTAMP_WITH_TIMEZONE + - TIME_WITHOUT_TIMEZONE + - TIME_WITH_TIMEZONE + - INTEGER + - NUMBER + - ARRAY + - OBJECT + - MULTI + - UNKNOWN + MapperValidationResult: + type: object + required: + - outputFields + properties: + id: + type: string + format: uuid + outputFields: + type: array + items: + $ref: "#/components/schemas/FieldSpec" + validationError: + $ref: "#/components/schemas/MapperValidationError" + MapperValidationError: + type: object + required: + - type + - message + properties: + type: + $ref: "#/components/schemas/MapperValidationErrorType" + message: + type: string + MapperValidationErrorType: + type: string + enum: + - MISSING_MAPPER + - INVALID_MAPPER_CONFIG + - FIELD_NOT_FOUND + - FIELD_ALREADY_EXISTS WebBackendConnectionListRequestBody: type: object required: @@ -15413,6 +15678,29 @@ components: type: string workflowId: type: string + DataplaneGetIdRequestBody: + type: object + required: + - workloadPriority + properties: + connectionId: + $ref: "#/components/schemas/ConnectionId" + actorType: + $ref: "#/components/schemas/ActorType" + actorId: + $ref: "#/components/schemas/ActorId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + workloadPriority: + $ref: "#/components/schemas/WorkloadPriority" + DataplaneRead: + type: object + required: + - id + properties: + id: + type: string + responses: NotFoundResponse: description: Object with given id was not found. 
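The two connector_builder_projects OAuth endpoints added to config.yaml above mirror the existing source OAuth pair: the client first requests a consent URL, redirects the user there, then posts the query parameters from the redirect back to complete_oauth to obtain the token payload. A rough sketch of that round trip with the JDK HTTP client; the base URL, bearer token, and IDs are placeholders, and the response handling is assumed rather than taken from the spec:

import java.net.URI
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

// Assumed base URL and bearer token; both are placeholders.
const val API = "http://localhost:8001/api"

fun builderProjectOauthRoundTrip(client: HttpClient, token: String) {
    // Step 1: request the consent URL (BuilderProjectOauthConsentRequest).
    val consentBody = """
        {"builderProjectId":"<builder-project-uuid>",
         "workspaceId":"<workspace-uuid>",
         "redirectUrl":"https://example.com/oauth/callback"}
    """.trimIndent()
    val consent = client.send(
        HttpRequest.newBuilder(URI.create("$API/v1/connector_builder_projects/get_oauth_consent_url"))
            .header("Authorization", "Bearer $token")
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(consentBody))
            .build(),
        HttpResponse.BodyHandlers.ofString(),
    )
    println("Redirect the user to the consentUrl in: ${consent.body()}")

    // Step 2: after the user grants consent, forward the redirect query params
    // (e.g. the auth code) via CompleteConnectorBuilderProjectOauthRequest.
    val completeBody = """
        {"builderProjectId":"<builder-project-uuid>",
         "workspaceId":"<workspace-uuid>",
         "queryParams":{"code":"<auth-code-from-redirect>"},
         "redirectUrl":"https://example.com/oauth/callback"}
    """.trimIndent()
    val complete = client.send(
        HttpRequest.newBuilder(URI.create("$API/v1/connector_builder_projects/complete_oauth"))
            .header("Authorization", "Bearer $token")
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(completeBody))
            .build(),
        HttpResponse.BodyHandlers.ofString(),
    )
    println("CompleteOAuthResponse: ${complete.body()}")
}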
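Similarly, the new /v1/web_backend/mappers/validate endpoint takes a connection ID, a stream descriptor, and a draft list of mappers, and returns the stream's initial fields plus a per-mapper result (output fields or a MapperValidationError). An illustrative request body only; the stream name/namespace and the mapperConfiguration keys are placeholders, since the configuration shape depends on the mapper type:

// Illustrative WebBackendValidateMappersRequestBody, expressed as a raw JSON string.
val validateMappersRequest = """
    {
      "connectionId": "<connection-uuid>",
      "streamDescriptor": { "name": "users", "namespace": "public" },
      "mappers": [
        {
          "type": "hashing",
          "mapperConfiguration": { "<mapper-specific-key>": "<value>" }
        }
      ]
    }
""".trimIndent()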
diff --git a/airbyte-api/workload-api/build.gradle.kts b/airbyte-api/workload-api/build.gradle.kts index 828e866ecf9..f45b6181062 100644 --- a/airbyte-api/workload-api/build.gradle.kts +++ b/airbyte-api/workload-api/build.gradle.kts @@ -141,8 +141,8 @@ private fun updateApiClientWithFailsafe(clientPath: String) { var apiClientFileText = apiClientFile.readText() // replace class declaration .replace( - "open class ApiClient(val baseUrl: String, val client: OkHttpClient = defaultClient) {", - "open class ApiClient(val baseUrl: String, val client: OkHttpClient = defaultClient, val policy : RetryPolicy = RetryPolicy.ofDefaults()) {") + "open class ApiClient(val baseUrl: String, val client: Call.Factory = defaultClient) {", + "open class ApiClient(val baseUrl: String, val client: Call.Factory = defaultClient, val policy : RetryPolicy = RetryPolicy.ofDefaults()) {") // replace execute call .replace("val response = client.newCall(request).execute()", """val call = client.newCall(request) @@ -206,8 +206,8 @@ private fun updateDomainClientsWithFailsafe(clientPath: String) { // replace class declaration domainClientFileText = domainClientFileText.replace( - "class (\\S+)\\(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient\\) : ApiClient\\(basePath, client\\)".toRegex(), - "class $1(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient, policy : RetryPolicy = RetryPolicy.ofDefaults()) : ApiClient(basePath, client, policy)" + "class (\\S+)\\(basePath: kotlin.String = defaultBasePath, client: Call.Factory = ApiClient.defaultClient\\) : ApiClient\\(basePath, client\\)".toRegex(), + "class $1(basePath: kotlin.String = defaultBasePath, client: Call.Factory = ApiClient.defaultClient, policy : RetryPolicy = RetryPolicy.ofDefaults()) : ApiClient(basePath, client, policy)" ) // add imports if not exist diff --git a/airbyte-api/workload-api/src/main/openapi/workload-openapi.yaml b/airbyte-api/workload-api/src/main/openapi/workload-openapi.yaml index f7eca9e7040..6b95894929f 100644 --- a/airbyte-api/workload-api/src/main/openapi/workload-openapi.yaml +++ b/airbyte-api/workload-api/src/main/openapi/workload-openapi.yaml @@ -560,6 +560,7 @@ components: enum: - high - default + x-type: String WorkloadRunningRequest: required: - workloadId @@ -581,6 +582,7 @@ components: - success - failure - cancelled + x-type: String WorkloadSuccessRequest: required: - workloadId @@ -595,3 +597,4 @@ components: - check - discover - spec + x-type: String diff --git a/airbyte-audit-logging/build.gradle.kts b/airbyte-audit-logging/build.gradle.kts new file mode 100644 index 00000000000..ba1c2831cc0 --- /dev/null +++ b/airbyte-audit-logging/build.gradle.kts @@ -0,0 +1,41 @@ +plugins { + id("io.airbyte.gradle.jvm.lib") + id("io.airbyte.gradle.publish") +} + +dependencies { + ksp(platform(libs.micronaut.platform)) + ksp(libs.bundles.micronaut.annotation.processor) + + api(libs.bundles.micronaut.annotation) + api(libs.bundles.micronaut.kotlin) + api(libs.bundles.micronaut.metrics) + api(libs.kotlin.logging) + api(libs.commons.io) + api(libs.azure.storage) + api(libs.aws.java.sdk.s3) + api(libs.aws.java.sdk.sts) + api(libs.s3) + api(libs.google.cloud.storage) + api(libs.guava) + api(libs.slf4j.api) + api(libs.jackson.kotlin) + + implementation(project(":oss:airbyte-commons")) + implementation(project(":oss:airbyte-api:problems-api")) + implementation(project(":oss:airbyte-api:server-api")) + implementation(project(":oss:airbyte-data")) + 
implementation(project(":oss:airbyte-commons-server")) + implementation(project(":oss:airbyte-commons-storage")) + implementation(libs.bundles.micronaut) + implementation(libs.micronaut.inject) + implementation(libs.bundles.logback) + implementation(libs.jackson.annotations) + implementation(libs.jackson.databind) + + kspTest(libs.bundles.micronaut.test.annotation.processor) + + testImplementation(libs.bundles.micronaut.test) + testImplementation(libs.bundles.junit) + testImplementation(libs.mockk) +} diff --git a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLogEntry.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLogEntry.kt new file mode 100644 index 00000000000..fcceff9224d --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLogEntry.kt @@ -0,0 +1,13 @@ +package io.airbyte.audit.logging + +import java.util.UUID + +data class AuditLogEntry( + val id: UUID, + val timestamp: Long, + val user: User? = null, + val actionName: String, + val summary: String, + val success: Boolean, + val errorMessage: String? = null, +) diff --git a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLoggingHelper.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLoggingHelper.kt new file mode 100644 index 00000000000..df2cd7f369d --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLoggingHelper.kt @@ -0,0 +1,67 @@ +package io.airbyte.audit.logging + +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.api.model.generated.PermissionRead +import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.config.Permission +import io.airbyte.data.services.PermissionService +import jakarta.inject.Singleton +import java.util.UUID + +@Singleton +class AuditLoggingHelper( + private val permissionService: PermissionService, + private val currentUserService: CurrentUserService, + private val objectMapper: ObjectMapper, +) { + fun getCurrentUser(): User { + val currentUser = currentUserService.getCurrentUser() + return User( + userId = currentUser.getUserId().toString(), + email = currentUser.email, + ) + } + + fun getPermission(permissionId: UUID): Permission { + return permissionService.getPermission(permissionId = permissionId) + } + + fun getPermissionScope(permissionRead: PermissionRead): String { + return when { + permissionRead.organizationId != null -> "organization" + permissionRead.workspaceId != null -> "workspace" + else -> "instance" + } + } + + fun getPermissionScope(permission: Permission): String { + return when { + permission.organizationId != null -> "organization" + permission.workspaceId != null -> "workspace" + else -> "instance" + } + } + + fun generateSummary( + requestSummary: String, + resultSummary: String, + ): String { + val requestJsonNode: JsonNode = objectMapper.readTree(requestSummary) + val resultJsonNode: JsonNode = objectMapper.readTree(resultSummary) + + // Create a copy of the requestJsonNode + val mergedJsonNode = requestJsonNode.deepCopy() as ObjectNode + + // Merge the resultJsonNode + if (resultJsonNode is ObjectNode) { + resultJsonNode.fieldNames().forEachRemaining { fieldName -> + mergedJsonNode.set(fieldName, resultJsonNode.get(fieldName)) + } + } + + // Return the merged JSON as a string + return objectMapper.writeValueAsString(mergedJsonNode) + } +} diff --git 
a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLoggingInterceptor.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLoggingInterceptor.kt new file mode 100644 index 00000000000..4a7d8e98589 --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditLoggingInterceptor.kt @@ -0,0 +1,141 @@ +package io.airbyte.audit.logging + +import io.airbyte.commons.annotation.AuditLogging +import io.airbyte.commons.annotation.InternalForTesting +import io.airbyte.commons.logging.DEFAULT_AUDIT_LOGGING_PATH_MDC_KEY +import io.airbyte.commons.storage.AUDIT_LOGGING +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.aop.InterceptorBean +import io.micronaut.aop.MethodInterceptor +import io.micronaut.aop.MethodInvocationContext +import io.micronaut.context.ApplicationContext +import io.micronaut.context.annotation.Value +import io.micronaut.http.HttpHeaders +import io.micronaut.http.context.ServerRequestContext +import io.micronaut.http.server.netty.NettyHttpRequest +import io.micronaut.inject.qualifiers.Qualifiers +import jakarta.inject.Singleton +import org.slf4j.MDC +import java.util.UUID + +private val logger = KotlinLogging.logger { AUDIT_LOGGING } + +/** + * Interceptor that logs the requests and stores the log entries. + */ +@Singleton +@InterceptorBean(AuditLogging::class) +class AuditLoggingInterceptor( + @Value("\${airbyte.audit.logging.enabled}") private val auditLoggingEnabled: Boolean, + @Value("\${airbyte.cloud.storage.bucket.audit-logging:}") private val auditLoggingBucket: String?, + private val applicationContext: ApplicationContext, + private val auditLoggingHelper: AuditLoggingHelper, +) : MethodInterceptor { + override fun intercept(context: MethodInvocationContext): Any { + if (!auditLoggingEnabled) { + logger.debug { "Proceed the request without audit logging because it is disabled." } + return context.proceed() ?: Unit + } + + val annotation = context.getAnnotation(AuditLogging::class.java) + if (annotation == null) { + logger.error { "Failed to retrieve the audit logging annotation." } + return context.proceed() ?: Unit + } + + val providerName = annotation.stringValue("provider") + if (providerName == null || providerName.isEmpty) { + logger.error { "Provider name is missing. Bypassing audit logging." } + return context.proceed() ?: Unit + } + + val provider = applicationContext.findBean(AuditProvider::class.java, Qualifiers.byName(providerName.get())) + if (provider.isEmpty) { + logger.error { "Failed to retrieve the audit provider. Bypassing audit logging." 
} + return context.proceed() ?: Unit + } + + // Get action name + val actionName = context.methodName + logger.debug { "Audit logging the request, audit action: $actionName" } + // Get request headers + val request = + ServerRequestContext.currentRequest().get() as NettyHttpRequest + val headers = request.headers + val user = getCurrentUserInfo(headers) + // Get request body + val parameters = context.parameters.values + val requestBody = parameters.firstOrNull()?.value + // Generate the summary from the request, before proceeding the request + val requestSummary = provider.get().generateSummaryFromRequest(requestBody) + + // Proceed the request and log the result/error + val result = + try { + context.proceed() + } catch (exception: Exception) { + logAuditInfo( + user = user, + actionName = actionName, + summary = "", + success = false, + error = exception.message, + ) + throw exception + } + val resultSummary = provider.get().generateSummaryFromResult(result) + // 1. Merge the summary + val summary = auditLoggingHelper.generateSummary(requestSummary, resultSummary) + + // 2. Save the log + logAuditInfo( + user = user, + actionName = actionName, + summary = summary, + success = true, + error = null, + ) + return result ?: Unit + } + + private fun getCurrentUserInfo(headers: HttpHeaders): User { + val currentUser = auditLoggingHelper.getCurrentUser() + val userAgent = headers.get("User-Agent")?.takeIf { it.isNotEmpty() } ?: "unknown" + val ipAddress = headers.get("X-Forwarded-For")?.takeIf { it.isNotEmpty() } ?: "unknown" + currentUser.userAgent = userAgent + currentUser.ipAddress = ipAddress + return currentUser + } + + @InternalForTesting + internal fun logAuditInfo( + user: User, + actionName: String, + summary: String, + success: Boolean, + error: String? = null, + ) { + val auditLogEntry = + AuditLogEntry( + id = UUID.randomUUID(), + timestamp = System.currentTimeMillis(), + user = user, + actionName = actionName, + summary = summary, + success = success, + errorMessage = error, + ) + if (auditLoggingBucket.isNullOrBlank()) { + // Common log to console only + logger.info { "Logging audit entry: $auditLogEntry" } + } else { + // Log to both cloud storage (via MDC routing) and also console + MDC.put(DEFAULT_AUDIT_LOGGING_PATH_MDC_KEY, AUDIT_LOGGING) + try { + logger.info { "Logging audit entry: $auditLogEntry" } + } finally { + MDC.remove(DEFAULT_AUDIT_LOGGING_PATH_MDC_KEY) + } + } + } +} diff --git a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditPermissionLogEntry.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditPermissionLogEntry.kt new file mode 100644 index 00000000000..e136fc9f66d --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditPermissionLogEntry.kt @@ -0,0 +1,18 @@ +package io.airbyte.audit.logging + +data class AuditPermissionLogEntry( + val targetUser: TargetUser, + val targetScope: TargetScope, + val previousRole: String? = null, + val newRole: String? = null, +) + +data class TargetUser( + val id: String, + val email: String? 
= null, +) + +data class TargetScope( + val type: String, + val id: String, +) diff --git a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditProvider.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditProvider.kt new file mode 100644 index 00000000000..cc4a41b7cf6 --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/AuditProvider.kt @@ -0,0 +1,11 @@ +package io.airbyte.audit.logging + +interface AuditProvider { + companion object { + const val EMPTY_SUMMARY = "{}" + } + + fun generateSummaryFromRequest(request: Any?): String + + fun generateSummaryFromResult(result: Any?): String +} diff --git a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/CreatePermissionAuditProvider.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/CreatePermissionAuditProvider.kt new file mode 100644 index 00000000000..969eb66e070 --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/CreatePermissionAuditProvider.kt @@ -0,0 +1,35 @@ +package io.airbyte.audit.logging + +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.api.model.generated.PermissionRead +import jakarta.inject.Named +import jakarta.inject.Singleton + +@Singleton +@Named("createPermission") +class CreatePermissionAuditProvider( + private val helper: AuditLoggingHelper, +) : AuditProvider { + override fun generateSummaryFromRequest(request: Any?): String { + return AuditProvider.EMPTY_SUMMARY + } + + override fun generateSummaryFromResult(result: Any?): String { + if (result is PermissionRead) { + val permissionLogEntry = + AuditPermissionLogEntry( + // Todo: get user email from id + targetUser = TargetUser(id = result.userId.toString()), + previousRole = null, + newRole = result.permissionType.toString(), + targetScope = + TargetScope( + type = helper.getPermissionScope(result), + id = result.organizationId?.toString() ?: result.workspaceId?.toString() ?: "", + ), + ) + return ObjectMapper().writeValueAsString(permissionLogEntry) + } + return AuditProvider.EMPTY_SUMMARY + } +} diff --git a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/DeletePermissionAuditProvider.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/DeletePermissionAuditProvider.kt new file mode 100644 index 00000000000..76ed74fe751 --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/DeletePermissionAuditProvider.kt @@ -0,0 +1,45 @@ +package io.airbyte.audit.logging + +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.api.model.generated.PermissionIdRequestBody +import io.github.oshai.kotlinlogging.KotlinLogging +import jakarta.inject.Named +import jakarta.inject.Singleton + +private val logger = KotlinLogging.logger {} + +@Singleton +@Named("deletePermission") +class DeletePermissionAuditProvider( + private val helper: AuditLoggingHelper, +) : AuditProvider { + override fun generateSummaryFromRequest(request: Any?): String { + try { + if (request is PermissionIdRequestBody) { + val permissionToDelete = helper.getPermission(request.permissionId) + val permissionLogEntry = + AuditPermissionLogEntry( + // Todo: get user email from id + targetUser = TargetUser(id = permissionToDelete.userId.toString()), + previousRole = permissionToDelete.permissionType.toString(), + newRole = null, + targetScope = + TargetScope( + type = helper.getPermissionScope(permissionToDelete), + id = permissionToDelete.organizationId?.toString() ?: 
permissionToDelete.workspaceId?.toString() ?: "", + ), + ) + return ObjectMapper().writeValueAsString(permissionLogEntry) + } + return AuditProvider.EMPTY_SUMMARY + } catch (e: Exception) { + logger.error { "Failed to generate summary from request. Error: ${e.message}" } + return AuditProvider.EMPTY_SUMMARY + } + } + + override fun generateSummaryFromResult(result: Any?): String { + // There is no result returned for deleting permission request + return AuditProvider.EMPTY_SUMMARY + } +} diff --git a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/UpdatePermissionAuditProvider.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/UpdatePermissionAuditProvider.kt new file mode 100644 index 00000000000..6a94c5b69e3 --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/UpdatePermissionAuditProvider.kt @@ -0,0 +1,47 @@ +package io.airbyte.audit.logging + +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.api.model.generated.PermissionUpdate +import io.github.oshai.kotlinlogging.KotlinLogging +import jakarta.inject.Named +import jakarta.inject.Singleton + +private val logger = KotlinLogging.logger {} + +@Singleton +@Named("updatePermission") +class UpdatePermissionAuditProvider( + private val helper: AuditLoggingHelper, +) : AuditProvider { + override fun generateSummaryFromRequest(request: Any?): String { + try { + if (request is PermissionUpdate) { + val previousPermission = helper.getPermission(request.permissionId) + val permissionLogEntry = + AuditPermissionLogEntry( + targetUser = TargetUser(id = previousPermission.userId.toString()), + previousRole = previousPermission.permissionType.toString(), + newRole = request.permissionType.toString(), + targetScope = + TargetScope( + type = helper.getPermissionScope(previousPermission), + id = + previousPermission.organizationId?.toString() + ?: previousPermission.workspaceId?.toString() + ?: "", + ), + ) + return ObjectMapper().writeValueAsString(permissionLogEntry) + } + return AuditProvider.EMPTY_SUMMARY + } catch (e: Exception) { + logger.error { "Failed to generate summary from request. Error: ${e.message}" } + return AuditProvider.EMPTY_SUMMARY + } + } + + override fun generateSummaryFromResult(result: Any?): String { + // There is no result returned for updating permission request + return AuditProvider.EMPTY_SUMMARY + } +} diff --git a/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/User.kt b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/User.kt new file mode 100644 index 00000000000..27c4eb01f73 --- /dev/null +++ b/airbyte-audit-logging/src/main/kotlin/io.airbyte.audit.logging/User.kt @@ -0,0 +1,3 @@ +package io.airbyte.audit.logging + +data class User(val userId: String, val email: String? = null, var ipAddress: String? = null, var userAgent: String? 
= null) diff --git a/airbyte-audit-logging/src/test/kotlin/io/airbyte/audit/logging/AuditLoggingHelperTest.kt b/airbyte-audit-logging/src/test/kotlin/io/airbyte/audit/logging/AuditLoggingHelperTest.kt new file mode 100644 index 00000000000..27c208d506e --- /dev/null +++ b/airbyte-audit-logging/src/test/kotlin/io/airbyte/audit/logging/AuditLoggingHelperTest.kt @@ -0,0 +1,68 @@ +package io.airbyte.audit.logging + +import com.fasterxml.jackson.databind.ObjectMapper +import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.data.services.PermissionService +import io.mockk.mockk +import io.mockk.unmockkAll +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test + +class AuditLoggingHelperTest { + private lateinit var permissionService: PermissionService + private lateinit var currentUserService: CurrentUserService + private lateinit var objectMapper: ObjectMapper + private lateinit var auditLoggingHelper: AuditLoggingHelper + + @BeforeEach + fun setUp() { + permissionService = mockk() + currentUserService = mockk() + objectMapper = ObjectMapper() + auditLoggingHelper = AuditLoggingHelper(permissionService, currentUserService, objectMapper) + } + + @AfterEach + fun tearDown() { + unmockkAll() + } + + @Nested + inner class TestGenerateFinalSummary { + @Test + fun `should generate a final summary from request and result`() { + val requestSummary = + "{\"targetUser\":{\"id\":\"dummyId\",\"email\":null},\"targetScope\":{\"type\":\"organization\",\"id\":\"dummyId\"}," + + "\"previousRole\":null,\"newRole\":\"organization_reader\"}" + val resultSummary = "{\"result\": \"summary\"}" + + val finalSummary = auditLoggingHelper.generateSummary(requestSummary, resultSummary) + + assert(finalSummary.contains("result")) + assert(finalSummary.contains("targetUser")) + assert(finalSummary.contains("email")) + } + + @Test + fun `should generate a final summary from request and result with empty request`() { + val requestSummary = "{}" + val resultSummary = "{\"result\": \"summary\"}" + + val finalSummary = auditLoggingHelper.generateSummary(requestSummary, resultSummary) + + assert(finalSummary.contains("result")) + } + + @Test + fun `should generate a final summary from request and result with empty result`() { + val requestSummary = "{}" + val resultSummary = "{}" + + val finalSummary = auditLoggingHelper.generateSummary(requestSummary, resultSummary) + + assert(finalSummary == "{}") + } + } +} diff --git a/airbyte-audit-logging/src/test/kotlin/io/airbyte/audit/logging/AuditLoggingInterceptorTest.kt b/airbyte-audit-logging/src/test/kotlin/io/airbyte/audit/logging/AuditLoggingInterceptorTest.kt new file mode 100644 index 00000000000..63fcf79aa2a --- /dev/null +++ b/airbyte-audit-logging/src/test/kotlin/io/airbyte/audit/logging/AuditLoggingInterceptorTest.kt @@ -0,0 +1,125 @@ +package io.airbyte.audit.logging + +import io.airbyte.api.model.generated.PermissionCreate +import io.airbyte.api.model.generated.PermissionRead +import io.airbyte.api.model.generated.PermissionType +import io.airbyte.commons.annotation.AuditLogging +import io.micronaut.aop.MethodInvocationContext +import io.micronaut.context.ApplicationContext +import io.micronaut.core.annotation.AnnotationValue +import io.micronaut.core.type.MutableArgumentValue +import io.micronaut.http.HttpHeaders +import io.micronaut.http.context.ServerRequestContext +import io.micronaut.http.server.netty.NettyHttpRequest +import 
io.micronaut.inject.qualifiers.Qualifiers +import io.mockk.every +import io.mockk.mockk +import io.mockk.mockkStatic +import io.mockk.spyk +import io.mockk.unmockkAll +import io.mockk.verify +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.util.Optional +import java.util.UUID + +class AuditLoggingInterceptorTest { + private lateinit var interceptor: AuditLoggingInterceptor + private lateinit var context: MethodInvocationContext + private lateinit var applicationContext: ApplicationContext + private lateinit var auditLoggingHelper: AuditLoggingHelper + + @BeforeEach + fun setUp() { + context = mockk() + applicationContext = mockk() + auditLoggingHelper = mockk() + } + + @AfterEach + fun tearDown() { + unmockkAll() + } + + @Test + fun `should only proceed the request without logging the result if it is not enabled`() { + interceptor = AuditLoggingInterceptor(false, null, applicationContext, auditLoggingHelper) + + every { context.methodName } returns "createPermission" + + every { context.proceed() } returns + PermissionRead() + .userId(UUID.randomUUID()) + .workspaceId(UUID.randomUUID()) + .organizationId(null) + .permissionType(PermissionType.WORKSPACE_EDITOR) + + interceptor.intercept(context) + + verify { context.proceed() } + } + + @Test + fun `should proceed the request and log the result`() { + interceptor = spyk(AuditLoggingInterceptor(true, "test-audit-log-bucket", applicationContext, auditLoggingHelper)) + val request = mockk>() + val headers = mockk() + + val actionName = "createPermission" + every { context.methodName } returns actionName + every { request.headers } returns headers + every { auditLoggingHelper.getCurrentUser() } returns User("userId", "email") + every { headers.get("User-Agent") } returns "userAgent" + every { headers.get("X-Forwarded-For") } returns null + + val parameterValue = mockk>() + val permissionUpdate = + PermissionCreate().apply { + permissionId = UUID.randomUUID() + permissionType = PermissionType.WORKSPACE_EDITOR + } + every { parameterValue.value } returns permissionUpdate + + val parameters = mutableMapOf>("permissionCreate" to parameterValue) + every { context.parameters } returns parameters + + mockkStatic(ServerRequestContext::class) + every { ServerRequestContext.currentRequest() } returns Optional.of(request) + + // Mock the audit logging annotation + val auditLoggingAnnotation = mockk>() + every { context.getAnnotation(AuditLogging::class.java) } returns auditLoggingAnnotation + every { auditLoggingAnnotation.stringValue("provider") } returns Optional.of("testProvider") + + // Mock the application context to return a fake provider + val fakeProvider = mockk() + every { fakeProvider.generateSummaryFromRequest(any()) } returns "{}" + every { fakeProvider.generateSummaryFromResult(any()) } returns "{\"result\": \"summary\"}" + every { auditLoggingHelper.generateSummary(any(), any()) } returns "{}" + every { applicationContext.findBean(AuditProvider::class.java, Qualifiers.byName("testProvider")) } returns Optional.of(fakeProvider) + + val targetUserId = UUID.randomUUID() + val workspaceId = UUID.randomUUID() + every { context.proceed() } returns + PermissionRead() + .userId(targetUserId) + .workspaceId(workspaceId) + .organizationId(null) + .permissionType(PermissionType.WORKSPACE_EDITOR) + + interceptor.intercept(context) + // Verifying that request is proceeded + verify { context.proceed() } + // Verify logAuditInfo was called with the correct parameters + verify { 
+ interceptor.logAuditInfo( + user = match { it.userId == "userId" && it.userAgent == "userAgent" }, + actionName = "createPermission", + summary = "{}", + success = true, + error = null, + ) + } + } +} diff --git a/airbyte-bootloader/build.gradle.kts b/airbyte-bootloader/build.gradle.kts index 5765e2dce68..3e3f1cd2a59 100644 --- a/airbyte-bootloader/build.gradle.kts +++ b/airbyte-bootloader/build.gradle.kts @@ -5,8 +5,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) @@ -58,9 +56,6 @@ dependencies { testImplementation(libs.mockk) testRuntimeOnly(libs.junit.jupiter.engine) - - integrationTestCompileOnly(libs.lombok) - integrationTestAnnotationProcessor(libs.lombok) } airbyte { diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Application.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Application.java index eb040ee9053..53babcf9c14 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Application.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Application.java @@ -6,15 +6,18 @@ import io.micronaut.context.ApplicationContext; import io.micronaut.runtime.Micronaut; -import lombok.extern.slf4j.Slf4j; +import java.lang.invoke.MethodHandles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Main application entry point responsible for starting the server and invoking the bootstrapping * of the Airbyte environment. */ -@Slf4j public class Application { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static void main(final String[] args) { try { final ApplicationContext applicationContext = Micronaut.run(Application.class, args); diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Bootloader.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Bootloader.java index 5b2ad62600d..fa39f38d8d5 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Bootloader.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/Bootloader.java @@ -25,18 +25,21 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.List; import java.util.Optional; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Ensures that the databases are migrated to the appropriate level. */ @Singleton -@Slf4j public class Bootloader { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + // Ordered list of version upgrades that must be completed before upgrading to latest. 
private static final List REQUIRED_VERSION_UPGRADES = List.of( new AirbyteVersion("0.32.0-alpha"), diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/DefaultPostLoadExecutor.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/DefaultPostLoadExecutor.java index 847d2be6461..0124b776a1e 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/DefaultPostLoadExecutor.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/DefaultPostLoadExecutor.java @@ -9,8 +9,10 @@ import io.airbyte.config.init.PostLoadExecutor; import jakarta.inject.Named; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.util.Optional; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Default implementation of the tasks that should be executed after a successful bootstrapping of @@ -24,9 +26,10 @@ * */ @Singleton -@Slf4j public class DefaultPostLoadExecutor implements PostLoadExecutor { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final ApplyDefinitionsHelper applyDefinitionsHelper; private final DeclarativeSourceUpdater declarativeSourceUpdater; private final Optional authSecretInitializer; diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java index b97df99572e..a987a2276eb 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/ProtocolVersionChecker.java @@ -20,6 +20,7 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; @@ -28,15 +29,17 @@ import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Validates that all connectors support the desired target Airbyte protocol version. 
*/ @Singleton -@Slf4j public class ProtocolVersionChecker { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final JobPersistence jobPersistence; private final AirbyteProtocolVersionRange airbyteProtocolTargetVersionRange; private final ActorDefinitionService actorDefinitionService; diff --git a/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java b/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java index 1dc6f676a4e..dc803e66033 100644 --- a/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java +++ b/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java @@ -16,6 +16,7 @@ import io.airbyte.commons.version.AirbyteVersion; import io.airbyte.commons.version.Version; import io.airbyte.config.Configs.DeploymentMode; +import io.airbyte.config.Configs.SeedDefinitionsProviderType; import io.airbyte.config.init.AirbyteCompatibleConnectorsValidator; import io.airbyte.config.init.ApplyDefinitionsHelper; import io.airbyte.config.init.BreakingChangeNotificationHelper; @@ -60,7 +61,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import javax.sql.DataSource; -import lombok.val; import org.flywaydb.core.Flyway; import org.jooq.SQLDialect; import org.junit.jupiter.api.AfterEach; @@ -85,6 +85,7 @@ class BootloaderTest { private FeatureFlagClient featureFlagClient; private static final String DEFAULT_REALM = "airbyte"; private static final String DOCKER = "docker"; + private static final SeedDefinitionsProviderType SEED_PROVIDER_TYPE = SeedDefinitionsProviderType.LOCAL; private static final String PROTOCOL_VERSION_001 = "0.0.1"; private static final String PROTOCOL_VERSION_124 = "1.2.4"; private static final String VERSION_0330_ALPHA = "0.33.0-alpha"; @@ -97,7 +98,7 @@ class BootloaderTest { // ⚠️ This line should change with every new migration to show that you meant to make a new // migration to the prod database - private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "1.1.0.005"; + private static final String CURRENT_CONFIGS_MIGRATION_VERSION = "1.1.0.008"; private static final String CURRENT_JOBS_MIGRATION_VERSION = "1.1.0.000"; @BeforeEach @@ -128,98 +129,99 @@ void cleanup() throws Exception { @Test void testBootloaderAppBlankDb() throws Exception { - val currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); + var currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); // The protocol version range should contain our default protocol version since many definitions we // load don't provide a protocol version. 
- val airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_001), new Version(PROTOCOL_VERSION_124)); - val runMigrationOnStartup = true; + var airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_001), new Version(PROTOCOL_VERSION_124)); + var runMigrationOnStartup = true; - val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - val jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + var configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + var jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - val configsFlyway = createConfigsFlyway(configsDataSource); - val jobsFlyway = createJobsFlyway(jobsDataSource); + var configsFlyway = createConfigsFlyway(configsDataSource); + var jobsFlyway = createJobsFlyway(jobsDataSource); - val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); - val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); + var configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); + var jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); - val connectionService = new ConnectionServiceJooqImpl(configDatabase); - val actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); - val scopedConfigurationService = mock(ScopedConfigurationService.class); - val actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( + var connectionService = new ConnectionServiceJooqImpl(configDatabase); + var actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); + var scopedConfigurationService = mock(ScopedConfigurationService.class); + var actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); - val destinationService = new DestinationServiceJooqImpl(configDatabase, + var destinationService = new DestinationServiceJooqImpl(configDatabase, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater); - val sourceService = new SourceServiceJooqImpl(configDatabase, + var sourceService = new SourceServiceJooqImpl(configDatabase, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater); - val workspaceService = new WorkspaceServiceJooqImpl(configDatabase, + var workspaceService = new WorkspaceServiceJooqImpl(configDatabase, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService); - val configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, + var configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); + var configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, configsDatabaseInitializationTimeoutMs, 
MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH)); - val configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); + var configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); final DefinitionsProvider definitionsProvider = new LocalDefinitionsProvider(); - val jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, + var jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); + var jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, jobsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH)); - val jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - val jobsPersistence = new DefaultJobPersistence(jobDatabase); - val organizationPersistence = new OrganizationPersistence(jobDatabase); - val protocolVersionChecker = + var jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); + var jobsPersistence = new DefaultJobPersistence(jobDatabase); + var organizationPersistence = new OrganizationPersistence(jobDatabase); + var protocolVersionChecker = new ProtocolVersionChecker(jobsPersistence, airbyteProtocolRange, actorDefinitionService, definitionsProvider, sourceService, destinationService); - val breakingChangeNotificationHelper = new BreakingChangeNotificationHelper(workspaceService, featureFlagClient); - val breakingChangeHelper = new BreakingChangesHelper(scopedConfigurationService, workspaceService, destinationService, sourceService); - val supportStateUpdater = + var breakingChangeNotificationHelper = new BreakingChangeNotificationHelper(workspaceService, featureFlagClient); + var breakingChangeHelper = new BreakingChangesHelper(scopedConfigurationService, workspaceService, destinationService, sourceService); + var supportStateUpdater = new SupportStateUpdater(actorDefinitionService, sourceService, destinationService, DeploymentMode.OSS, breakingChangeHelper, breakingChangeNotificationHelper, featureFlagClient); - val metricClient = new NotImplementedMetricClient(); - val actorDefinitionVersionResolver = mock(ActorDefinitionVersionResolver.class); - val airbyteCompatibleConnectorsValidator = mock(AirbyteCompatibleConnectorsValidator.class); - val connectorRolloutService = mock(ConnectorRolloutService.class); + var metricClient = new NotImplementedMetricClient(); + var actorDefinitionVersionResolver = mock(ActorDefinitionVersionResolver.class); + var airbyteCompatibleConnectorsValidator = mock(AirbyteCompatibleConnectorsValidator.class); + var connectorRolloutService = mock(ConnectorRolloutService.class); when(airbyteCompatibleConnectorsValidator.validate(anyString(), anyString())) .thenReturn(new ConnectorPlatformCompatibilityValidationResult(true, "")); when(airbyteCompatibleConnectorsValidator.validateDeclarativeManifest(anyString())) .thenReturn(new ConnectorPlatformCompatibilityValidationResult(true, "")); - val applyDefinitionsHelper = - new ApplyDefinitionsHelper(definitionsProvider, jobsPersistence, actorDefinitionService, sourceService, destinationService, + var applyDefinitionsHelper = + new ApplyDefinitionsHelper(definitionsProvider, SEED_PROVIDER_TYPE, jobsPersistence, actorDefinitionService, sourceService, + destinationService, metricClient, supportStateUpdater, actorDefinitionVersionResolver, airbyteCompatibleConnectorsValidator, 
connectorRolloutService); final DeclarativeManifestImageVersionsProvider declarativeManifestImageVersionsProvider = new LocalDeclarativeManifestImageVersionsProvider(); - val declarativeSourceUpdater = + var declarativeSourceUpdater = new DeclarativeSourceUpdater(declarativeManifestImageVersionsProvider, mock(DeclarativeManifestImageVersionService.class), - actorDefinitionService, airbyteCompatibleConnectorsValidator); - val authKubeSecretInitializer = mock(AuthKubernetesSecretInitializer.class); - val postLoadExecutor = + actorDefinitionService, airbyteCompatibleConnectorsValidator, featureFlagClient); + var authKubeSecretInitializer = mock(AuthKubernetesSecretInitializer.class); + var postLoadExecutor = new DefaultPostLoadExecutor(applyDefinitionsHelper, declarativeSourceUpdater, Optional.of(authKubeSecretInitializer)); - val bootloader = + var bootloader = new Bootloader(false, workspaceService, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, organizationPersistence, protocolVersionChecker, runMigrationOnStartup, DEFAULT_REALM, postLoadExecutor); bootloader.load(); - val jobsMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); + var jobsMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); assertEquals(CURRENT_JOBS_MIGRATION_VERSION, jobsMigrator.getLatestMigration().getVersion().getVersion()); - val configsMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); + var configsMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); assertEquals(CURRENT_CONFIGS_MIGRATION_VERSION, configsMigrator.getLatestMigration().getVersion().getVersion()); assertEquals(VERSION_0330_ALPHA, jobsPersistence.getVersion().get()); @@ -235,79 +237,80 @@ void testBootloaderAppBlankDb() throws Exception { @SuppressWarnings("VariableDeclarationUsageDistance") @Test void testRequiredVersionUpgradePredicate() throws Exception { - val currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); - val airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_001), new Version(PROTOCOL_VERSION_124)); - val runMigrationOnStartup = true; - - val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - val jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - - val configsFlyway = createConfigsFlyway(configsDataSource); - val jobsFlyway = createJobsFlyway(jobsDataSource); - - val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); - val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); - val connectionService = new ConnectionServiceJooqImpl(configDatabase); - val actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); - val scopedConfigurationService = mock(ScopedConfigurationService.class); - val actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( + var currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); + var airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_001), new Version(PROTOCOL_VERSION_124)); + var runMigrationOnStartup = true; + + var configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + var jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + + var configsFlyway = createConfigsFlyway(configsDataSource); + var jobsFlyway = createJobsFlyway(jobsDataSource); + + 
var configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); + var jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); + var connectionService = new ConnectionServiceJooqImpl(configDatabase); + var actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); + var scopedConfigurationService = mock(ScopedConfigurationService.class); + var actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); - val sourceService = new SourceServiceJooqImpl(configDatabase, + var sourceService = new SourceServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), connectionService, actorDefinitionVersionUpdater); - val destinationService = new DestinationServiceJooqImpl(configDatabase, + var destinationService = new DestinationServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), connectionService, actorDefinitionVersionUpdater); - val workspaceService = new WorkspaceServiceJooqImpl(configDatabase, + var workspaceService = new WorkspaceServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class)); - val configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, + var configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); + var configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, configsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH)); - val configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); + var configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); final DefinitionsProvider definitionsProvider = new LocalDefinitionsProvider(); - val jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, + var jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); + var jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, jobsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH)); - val jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - val jobsPersistence = new DefaultJobPersistence(jobDatabase); - val organizationPersistence = new OrganizationPersistence(jobDatabase); - val breakingChangeNotificationHelper = new BreakingChangeNotificationHelper(workspaceService, featureFlagClient); - val breakingChangesHelper = new BreakingChangesHelper(scopedConfigurationService, workspaceService, destinationService, sourceService); - val supportStateUpdater = + var jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); + var jobsPersistence = new DefaultJobPersistence(jobDatabase); + var organizationPersistence = new OrganizationPersistence(jobDatabase); + var breakingChangeNotificationHelper = new BreakingChangeNotificationHelper(workspaceService, 
featureFlagClient); + var breakingChangesHelper = new BreakingChangesHelper(scopedConfigurationService, workspaceService, destinationService, sourceService); + var supportStateUpdater = new SupportStateUpdater(actorDefinitionService, sourceService, destinationService, DeploymentMode.OSS, breakingChangesHelper, breakingChangeNotificationHelper, featureFlagClient); - val protocolVersionChecker = + var protocolVersionChecker = new ProtocolVersionChecker(jobsPersistence, airbyteProtocolRange, actorDefinitionService, definitionsProvider, sourceService, destinationService); - val metricClient = new NotImplementedMetricClient(); - val actorDefinitionVersionResolver = mock(ActorDefinitionVersionResolver.class); - val airbyteCompatibleConnectorsValidator = mock(AirbyteCompatibleConnectorsValidator.class); - val connectorRolloutService = mock(ConnectorRolloutService.class); - val applyDefinitionsHelper = - new ApplyDefinitionsHelper(definitionsProvider, jobsPersistence, actorDefinitionService, sourceService, destinationService, + var metricClient = new NotImplementedMetricClient(); + var actorDefinitionVersionResolver = mock(ActorDefinitionVersionResolver.class); + var airbyteCompatibleConnectorsValidator = mock(AirbyteCompatibleConnectorsValidator.class); + var connectorRolloutService = mock(ConnectorRolloutService.class); + var applyDefinitionsHelper = + new ApplyDefinitionsHelper(definitionsProvider, SEED_PROVIDER_TYPE, jobsPersistence, actorDefinitionService, sourceService, + destinationService, metricClient, supportStateUpdater, actorDefinitionVersionResolver, airbyteCompatibleConnectorsValidator, connectorRolloutService); final DeclarativeManifestImageVersionsProvider declarativeManifestImageVersionsProvider = new LocalDeclarativeManifestImageVersionsProvider(); - val declarativeSourceUpdater = + var declarativeSourceUpdater = new DeclarativeSourceUpdater(declarativeManifestImageVersionsProvider, mock(DeclarativeManifestImageVersionService.class), - actorDefinitionService, airbyteCompatibleConnectorsValidator); - val authKubeSecretInitializer = mock(AuthKubernetesSecretInitializer.class); - val postLoadExecutor = new DefaultPostLoadExecutor(applyDefinitionsHelper, declarativeSourceUpdater, Optional.of(authKubeSecretInitializer)); + actorDefinitionService, airbyteCompatibleConnectorsValidator, featureFlagClient); + var authKubeSecretInitializer = mock(AuthKubernetesSecretInitializer.class); + var postLoadExecutor = new DefaultPostLoadExecutor(applyDefinitionsHelper, declarativeSourceUpdater, Optional.of(authKubeSecretInitializer)); - val bootloader = + var bootloader = new Bootloader(false, workspaceService, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, organizationPersistence, protocolVersionChecker, runMigrationOnStartup, DEFAULT_REALM, postLoadExecutor); @@ -362,22 +365,22 @@ void testRequiredVersionUpgradePredicate() throws Exception { @Test void testPostLoadExecutionExecutes() throws Exception { final var testTriggered = new AtomicBoolean(); - val currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); - val airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_001), new Version(PROTOCOL_VERSION_124)); - val runMigrationOnStartup = true; - - val configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - val jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); - - val configsFlyway = 
createConfigsFlyway(configsDataSource); - val jobsFlyway = createJobsFlyway(jobsDataSource); - - val configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); - val jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); - val connectionService = new ConnectionServiceJooqImpl(configDatabase); - val actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); - val scopedConfigurationService = mock(ScopedConfigurationService.class); - val actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( + var currentAirbyteVersion = new AirbyteVersion(VERSION_0330_ALPHA); + var airbyteProtocolRange = new AirbyteProtocolVersionRange(new Version(PROTOCOL_VERSION_001), new Version(PROTOCOL_VERSION_124)); + var runMigrationOnStartup = true; + + var configsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + var jobsDslContext = DSLContextFactory.create(configsDataSource, SQLDialect.POSTGRES); + + var configsFlyway = createConfigsFlyway(configsDataSource); + var jobsFlyway = createJobsFlyway(jobsDataSource); + + var configDatabase = new ConfigsDatabaseTestProvider(configsDslContext, configsFlyway).create(false); + var jobDatabase = new JobsDatabaseTestProvider(jobsDslContext, jobsFlyway).create(false); + var connectionService = new ConnectionServiceJooqImpl(configDatabase); + var actorDefinitionService = new ActorDefinitionServiceJooqImpl(configDatabase); + var scopedConfigurationService = mock(ScopedConfigurationService.class); + var actorDefinitionVersionUpdater = new ActorDefinitionVersionUpdater( featureFlagClient, connectionService, actorDefinitionService, @@ -386,40 +389,40 @@ void testPostLoadExecutionExecutes() throws Exception { final SecretsRepositoryReader secretsRepositoryReader = mock(SecretsRepositoryReader.class); final SecretsRepositoryWriter secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); final SecretPersistenceConfigService secretPersistenceConfigService = mock(SecretPersistenceConfigService.class); - val workspaceService = new WorkspaceServiceJooqImpl(configDatabase, + var workspaceService = new WorkspaceServiceJooqImpl(configDatabase, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService); - val sourceService = new SourceServiceJooqImpl(configDatabase, + var sourceService = new SourceServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), connectionService, actorDefinitionVersionUpdater); - val destinationService = new DestinationServiceJooqImpl(configDatabase, + var destinationService = new DestinationServiceJooqImpl(configDatabase, featureFlagClient, mock(SecretsRepositoryReader.class), mock(SecretsRepositoryWriter.class), mock(SecretPersistenceConfigService.class), connectionService, actorDefinitionVersionUpdater); - val configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, + var configsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); + var configDatabaseInitializer = DatabaseCheckFactory.createConfigsDatabaseInitializer(configsDslContext, configsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.CONFIGS_INITIAL_SCHEMA_PATH)); - val configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); + var 
configsDatabaseMigrator = new ConfigsDatabaseMigrator(configDatabase, configsFlyway); final DefinitionsProvider definitionsProvider = new LocalDefinitionsProvider(); - val jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); - val jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, + var jobsDatabaseInitializationTimeoutMs = TimeUnit.SECONDS.toMillis(60L); + var jobsDatabaseInitializer = DatabaseCheckFactory.createJobsDatabaseInitializer(jobsDslContext, jobsDatabaseInitializationTimeoutMs, MoreResources.readResource(DatabaseConstants.JOBS_INITIAL_SCHEMA_PATH)); - val jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); - val jobsPersistence = new DefaultJobPersistence(jobDatabase); - val organizationPersistence = new OrganizationPersistence(jobDatabase); - val protocolVersionChecker = + var jobsDatabaseMigrator = new JobsDatabaseMigrator(jobDatabase, jobsFlyway); + var jobsPersistence = new DefaultJobPersistence(jobDatabase); + var organizationPersistence = new OrganizationPersistence(jobDatabase); + var protocolVersionChecker = new ProtocolVersionChecker(jobsPersistence, airbyteProtocolRange, actorDefinitionService, definitionsProvider, sourceService, destinationService); - val postLoadExecutor = new PostLoadExecutor() { + var postLoadExecutor = new PostLoadExecutor() { @Override public void execute() { @@ -427,7 +430,7 @@ public void execute() { } }; - val bootloader = + var bootloader = new Bootloader(false, workspaceService, configDatabaseInitializer, configsDatabaseMigrator, currentAirbyteVersion, jobsDatabaseInitializer, jobsDatabaseMigrator, jobsPersistence, organizationPersistence, protocolVersionChecker, runMigrationOnStartup, DEFAULT_REALM, postLoadExecutor); diff --git a/airbyte-commons-auth/build.gradle.kts b/airbyte-commons-auth/build.gradle.kts index 87f006e3bd3..f688cedc480 100644 --- a/airbyte-commons-auth/build.gradle.kts +++ b/airbyte-commons-auth/build.gradle.kts @@ -7,8 +7,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) @@ -50,33 +48,39 @@ tasks.withType().configureEach { duplicatesStrategy = DuplicatesStrategy.EXCLUDE } -val generateIntents = tasks.register("generateIntents") { - - doLast { - // Load YAML data - val intentsYaml = file("src/main/resources/intents.yaml") - val yaml = Yaml() - val data = yaml.load>(FileReader(intentsYaml)) - val intentsData = data["intents"] as? Map<*, *> - - // Generate the Intent enum class as a string - val enumEntries = intentsData?.map { (key, value) -> - // Safely cast value and extract roles - val details = value as? Map<*, *> - val roles = (details?.get("roles") as? 
List<*>)?.filterIsInstance() ?: emptyList() - val rolesString = roles.joinToString(", ") { "\"$it\"" } - "$key(setOf($rolesString))" - }?.joinToString(",\n ") ?: "" - - // read the intent-template.txt and replace with the generated enum entries - val intentClassContent = file("src/main/resources/intent-class-template.txt").readText().replace("", enumEntries) - - val buildDirPath = layout.buildDirectory.asFile.get().absolutePath - val outputDir = File(buildDirPath, "generated/intents/io/airbyte-commons-auth/generated") - outputDir.mkdirs() - File(outputDir, "Intent.kt").writeText(intentClassContent) +val generateIntents = + tasks.register("generateIntents") { + + doLast { + // Load YAML data + val intentsYaml = file("src/main/resources/intents.yaml") + val yaml = Yaml() + val data = yaml.load>(FileReader(intentsYaml)) + val intentsData = data["intents"] as? Map<*, *> + + // Generate the Intent enum class as a string + val enumEntries = + intentsData + ?.map { (key, value) -> + // Safely cast value and extract roles + val details = value as? Map<*, *> + val roles = (details?.get("roles") as? List<*>)?.filterIsInstance() ?: emptyList() + val rolesString = roles.joinToString(", ") { "\"$it\"" } + "$key(setOf($rolesString))" + }?.joinToString(",\n ") ?: "" + + // read the intent-template.txt and replace with the generated enum entries + val intentClassContent = file("src/main/resources/intent-class-template.txt").readText().replace("", enumEntries) + + val buildDirPath = + layout.buildDirectory.asFile + .get() + .absolutePath + val outputDir = File(buildDirPath, "generated/intents/io/airbyte-commons-auth/generated") + outputDir.mkdirs() + File(outputDir, "Intent.kt").writeText(intentClassContent) + } } -} tasks.named("compileKotlin") { dependsOn(generateIntents) @@ -85,7 +89,7 @@ tasks.named("compileKotlin") { kotlin { sourceSets["main"].apply { kotlin.srcDir( - "${project.layout.buildDirectory.get()}/generated/intents/io/airbyte-commons-auth/generated" + "${project.layout.buildDirectory.get()}/generated/intents/io/airbyte-commons-auth/generated", ) } -} \ No newline at end of file +} diff --git a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/support/JwtTokenParser.java b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/support/JwtTokenParser.java index 879e4affef7..5d91b5b89fb 100644 --- a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/support/JwtTokenParser.java +++ b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/support/JwtTokenParser.java @@ -8,18 +8,21 @@ import com.fasterxml.jackson.databind.ObjectMapper; import io.airbyte.commons.json.Jsons; import io.airbyte.config.AuthProvider; +import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.HashMap; import java.util.Map; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper function to parse out JWT token. 
*/ -@Slf4j public class JwtTokenParser { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static final String JWT_SSO_REALM = "sso_realm"; public static final String JWT_USER_NAME = "user_name"; public static final String JWT_USER_EMAIL = "user_email"; diff --git a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/support/JwtUserAuthenticationResolver.java b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/support/JwtUserAuthenticationResolver.java index 22043e58cc3..846b7c7baf5 100644 --- a/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/support/JwtUserAuthenticationResolver.java +++ b/airbyte-commons-auth/src/main/java/io/airbyte/commons/auth/support/JwtUserAuthenticationResolver.java @@ -13,16 +13,19 @@ import io.airbyte.config.AuthenticatedUser; import io.micronaut.security.utils.SecurityService; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.util.Optional; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Resolves JWT into UserRead object. */ @Singleton -@Slf4j public class JwtUserAuthenticationResolver implements UserAuthenticationResolver { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final Optional securityService; public JwtUserAuthenticationResolver(final Optional securityService) { diff --git a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/CatalogClientConverters.java b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/CatalogClientConverters.java index fd0027958da..bbf9107e2cf 100644 --- a/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/CatalogClientConverters.java +++ b/airbyte-commons-converters/src/main/java/io/airbyte/commons/converters/CatalogClientConverters.java @@ -216,7 +216,7 @@ private List toConfiguredMappers(final @Nullable List { final String mapperName = mapperConfig.getType().toString(); final Mapper mapper = mappers.get(mapperName); - return mapper.spec().deserialize(new ConfiguredMapper(mapperName, mapperConfig.getMapperConfiguration())); + return mapper.spec().deserialize(new ConfiguredMapper(mapperName, mapperConfig.getMapperConfiguration(), mapperConfig.getId())); }) .collect(Collectors.toList()); } diff --git a/airbyte-commons-converters/src/test/java/io/airbyte/commons/converters/CatalogClientConvertersTest.java b/airbyte-commons-converters/src/test/java/io/airbyte/commons/converters/CatalogClientConvertersTest.java index f7acab0094b..f6f3c065991 100644 --- a/airbyte-commons-converters/src/test/java/io/airbyte/commons/converters/CatalogClientConvertersTest.java +++ b/airbyte-commons-converters/src/test/java/io/airbyte/commons/converters/CatalogClientConvertersTest.java @@ -27,6 +27,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.UUID; import org.junit.jupiter.api.Test; @MicronautTest @@ -99,7 +100,8 @@ void testConvertToProtocol() { @Test void testConvertInternalWithMapping() { - final HashingMapperConfig hashingMapper = MapperHelperKt.createHashingMapper(ID_FIELD_NAME); + final UUID mapperId = UUID.randomUUID(); + final HashingMapperConfig hashingMapper = MapperHelperKt.createHashingMapper(ID_FIELD_NAME, mapperId); final var streamConfig = new io.airbyte.api.client.model.generated.AirbyteStreamConfiguration( io.airbyte.api.client.model.generated.SyncMode.FULL_REFRESH, @@ -112,7 +114,7 @@ void 
testConvertInternalWithMapping() { null, null, null, - List.of(new ConfiguredStreamMapper(StreamMapperType.HASHING, Jsons.jsonNode(hashingMapper.getConfig()))), + List.of(new ConfiguredStreamMapper(StreamMapperType.HASHING, Jsons.jsonNode(hashingMapper.getConfig()), mapperId)), null, null, null); @@ -129,7 +131,7 @@ void testConvertInternalWithMapping() { assertEquals(1, stream.getFields().size()); assertEquals(1, stream.getMappers().size()); assertEquals(fieldGenerator.getFieldsFromSchema(stream.getStream().getJsonSchema()), stream.getFields()); - assertEquals(MapperHelperKt.createHashingMapper(ID_FIELD_NAME), stream.getMappers().getFirst()); + assertEquals(hashingMapper, stream.getMappers().getFirst()); } @Test diff --git a/airbyte-commons-micronaut-security/build.gradle.kts b/airbyte-commons-micronaut-security/build.gradle.kts index a385af04df2..1eb7251af6d 100644 --- a/airbyte-commons-micronaut-security/build.gradle.kts +++ b/airbyte-commons-micronaut-security/build.gradle.kts @@ -4,9 +4,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) @@ -17,8 +14,6 @@ dependencies { implementation(project(":oss:airbyte-commons")) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-commons-protocol/build.gradle.kts b/airbyte-commons-protocol/build.gradle.kts index 8a2fac14b41..ec340be20d1 100644 --- a/airbyte-commons-protocol/build.gradle.kts +++ b/airbyte-commons-protocol/build.gradle.kts @@ -4,8 +4,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(libs.bundles.micronaut.annotation.processor) implementation(project(":oss:airbyte-commons")) diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/CatalogDiffHelpers.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/CatalogDiffHelpers.java index 71126b7b9e7..9ee18289335 100644 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/CatalogDiffHelpers.java +++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/CatalogDiffHelpers.java @@ -7,11 +7,11 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Sets; -import io.airbyte.commons.protocol.transform_models.FieldTransform; -import io.airbyte.commons.protocol.transform_models.StreamAttributeTransform; -import io.airbyte.commons.protocol.transform_models.StreamTransform; -import io.airbyte.commons.protocol.transform_models.UpdateFieldSchemaTransform; -import io.airbyte.commons.protocol.transform_models.UpdateStreamTransform; +import io.airbyte.commons.protocol.transformmodels.FieldTransform; +import io.airbyte.commons.protocol.transformmodels.StreamAttributeTransform; +import io.airbyte.commons.protocol.transformmodels.StreamTransform; +import io.airbyte.commons.protocol.transformmodels.UpdateFieldSchemaTransform; +import io.airbyte.commons.protocol.transformmodels.UpdateStreamTransform; import io.airbyte.config.ConfiguredAirbyteCatalog; import io.airbyte.config.ConfiguredAirbyteStream; import io.airbyte.config.DestinationSyncMode; diff --git 
a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/serde/AirbyteMessageGenericDeserializer.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/serde/AirbyteMessageGenericDeserializer.java index 0d50fa5ae8b..97053f96637 100644 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/serde/AirbyteMessageGenericDeserializer.java +++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/serde/AirbyteMessageGenericDeserializer.java @@ -7,7 +7,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.version.Version; import java.util.Optional; -import lombok.Getter; /** * Default Airbyte Protocol deserializer. @@ -16,7 +15,6 @@ */ public class AirbyteMessageGenericDeserializer implements AirbyteMessageDeserializer { - @Getter final Version targetVersion; final Class typeClass; @@ -30,4 +28,9 @@ public Optional deserializeExact(final String json) { return Jsons.tryDeserializeExact(json, typeClass); } + @Override + public Version getTargetVersion() { + return targetVersion; + } + } diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/serde/AirbyteMessageGenericSerializer.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/serde/AirbyteMessageGenericSerializer.java index 9a7bc945a29..d79bf6954e7 100644 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/serde/AirbyteMessageGenericSerializer.java +++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/serde/AirbyteMessageGenericSerializer.java @@ -6,23 +6,28 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.version.Version; -import lombok.AllArgsConstructor; -import lombok.Getter; /** * Default Airbyte Protocol serializer. * * @param object type */ -@AllArgsConstructor public class AirbyteMessageGenericSerializer implements AirbyteMessageSerializer { - @Getter private final Version targetVersion; + public AirbyteMessageGenericSerializer(Version targetVersion) { + this.targetVersion = targetVersion; + } + @Override public String serialize(T message) { return Jsons.serialize(message); } + @Override + public Version getTargetVersion() { + return targetVersion; + } + } diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/AddFieldTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/AddFieldTransform.java deleted file mode 100644 index c57046bd817..00000000000 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/AddFieldTransform.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol.transform_models; - -import com.fasterxml.jackson.databind.JsonNode; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.ToString; - -/** - * Represents the addition of a field to an {@link io.airbyte.protocol.models.AirbyteStream}. 
- */ -@AllArgsConstructor -@EqualsAndHashCode -@ToString -public class AddFieldTransform { - - private final JsonNode schema; - - public JsonNode getSchema() { - return schema; - } - -} diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/AddStreamTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/AddStreamTransform.java deleted file mode 100644 index a5263901253..00000000000 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/AddStreamTransform.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol.transform_models; - -import io.airbyte.protocol.models.StreamDescriptor; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.ToString; - -/** - * Represents the addition of an {@link io.airbyte.protocol.models.AirbyteStream} to a - * {@link io.airbyte.protocol.models.AirbyteCatalog}. - */ -@AllArgsConstructor -@EqualsAndHashCode -@ToString -public class AddStreamTransform { - - private final StreamDescriptor streamDescriptor; - - public StreamDescriptor getStreamDescriptor() { - return streamDescriptor; - } - -} diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/RemoveFieldTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/RemoveFieldTransform.java deleted file mode 100644 index 7c65934a242..00000000000 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/RemoveFieldTransform.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol.transform_models; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.ArrayList; -import java.util.List; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.ToString; - -/** - * Represents the removal of a field to an {@link io.airbyte.protocol.models.AirbyteStream}. - */ -@AllArgsConstructor -@EqualsAndHashCode -@ToString -public class RemoveFieldTransform { - - private final List fieldName; - private final JsonNode schema; - - public List getFieldName() { - return new ArrayList<>(fieldName); - } - - public JsonNode getSchema() { - return schema; - } - -} diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/RemoveStreamTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/RemoveStreamTransform.java deleted file mode 100644 index 28c111f2e71..00000000000 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/RemoveStreamTransform.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol.transform_models; - -import io.airbyte.protocol.models.StreamDescriptor; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.ToString; - -/** - * Represents the removal of an {@link io.airbyte.protocol.models.AirbyteStream} to a - * {@link io.airbyte.protocol.models.AirbyteCatalog}. 
- */ -@AllArgsConstructor -@EqualsAndHashCode -@ToString -public class RemoveStreamTransform { - - private final StreamDescriptor streamDescriptor; - - public StreamDescriptor getStreamDescriptor() { - return streamDescriptor; - } - -} diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamAttributeTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamAttributeTransform.java deleted file mode 100644 index 7cc4a696b56..00000000000 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamAttributeTransform.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol.transform_models; - -import java.util.List; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.ToString; - -/** - * Represents the diff between two fields. - */ -@Getter -@AllArgsConstructor -@EqualsAndHashCode -@ToString -public final class StreamAttributeTransform { - - private final StreamAttributeTransformType transformType; - private final UpdateStreamAttributePrimaryKeyTransform updatePrimaryKeyTransform; - private final boolean breaking; - - public static StreamAttributeTransform createUpdatePrimaryKeyTransform(final List> oldPrimaryKey, - final List> newPrimaryKey, - final Boolean breaking) { - return new StreamAttributeTransform(StreamAttributeTransformType.UPDATE_PRIMARY_KEY, - new UpdateStreamAttributePrimaryKeyTransform(oldPrimaryKey, newPrimaryKey), - breaking); - } - - public static StreamAttributeTransform createUpdatePrimaryKeyTransform(final UpdateStreamAttributePrimaryKeyTransform updatePrimaryKeyTransform, - final Boolean breaking) { - return new StreamAttributeTransform(StreamAttributeTransformType.UPDATE_PRIMARY_KEY, updatePrimaryKeyTransform, - breaking); - } - -} diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateFieldSchemaTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateFieldSchemaTransform.java deleted file mode 100644 index c9d6ac6f888..00000000000 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateFieldSchemaTransform.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol.transform_models; - -import com.fasterxml.jackson.databind.JsonNode; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.ToString; - -/** - * Represents the update of a field. 
- */ -@AllArgsConstructor -@EqualsAndHashCode -@ToString -public class UpdateFieldSchemaTransform { - - private final JsonNode oldSchema; - private final JsonNode newSchema; - - public JsonNode getOldSchema() { - return oldSchema; - } - - public JsonNode getNewSchema() { - return newSchema; - } - -} diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateStreamAttributePrimaryKeyTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateStreamAttributePrimaryKeyTransform.java deleted file mode 100644 index 4250f41e587..00000000000 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateStreamAttributePrimaryKeyTransform.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol.transform_models; - -import java.util.List; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.ToString; - -/** - * Represents the update of a stream attribute. - */ -@Getter -@AllArgsConstructor -@EqualsAndHashCode -@ToString -public class UpdateStreamAttributePrimaryKeyTransform { - - private final List> oldPrimaryKey; - private final List> newPrimaryKey; - -} diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateStreamTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateStreamTransform.java deleted file mode 100644 index ebd82ceeac3..00000000000 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/UpdateStreamTransform.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.protocol.transform_models; - -import java.util.HashSet; -import java.util.Set; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.ToString; - -/** - * Represents the update of an {@link io.airbyte.protocol.models.AirbyteStream}. - */ -@AllArgsConstructor -@EqualsAndHashCode -@ToString -public class UpdateStreamTransform { - - private final Set fieldTransforms; - private final Set attributeTransforms; - - public Set getFieldTransforms() { - return new HashSet<>(fieldTransforms); - } - - public Set getAttributeTransforms() { - return new HashSet<>(attributeTransforms); - } - -} diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/FieldTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/FieldTransform.java similarity index 58% rename from airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/FieldTransform.java rename to airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/FieldTransform.java index ea2857bcde7..9f40efd5f3c 100644 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/FieldTransform.java +++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/FieldTransform.java @@ -2,20 +2,15 @@ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
*/ -package io.airbyte.commons.protocol.transform_models; +package io.airbyte.commons.protocol.transformmodels; import com.fasterxml.jackson.databind.JsonNode; import java.util.List; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.ToString; +import java.util.Objects; /** * Represents the diff between two fields. */ -@AllArgsConstructor -@EqualsAndHashCode -@ToString public final class FieldTransform { private final FieldTransformType transformType; @@ -25,6 +20,20 @@ public final class FieldTransform { private final UpdateFieldSchemaTransform updateFieldTransform; private final boolean breaking; + public FieldTransform(FieldTransformType transformType, + List fieldName, + AddFieldTransform addFieldTransform, + RemoveFieldTransform removeFieldTransform, + UpdateFieldSchemaTransform updateFieldTransform, + boolean breaking) { + this.transformType = transformType; + this.fieldName = fieldName; + this.addFieldTransform = addFieldTransform; + this.removeFieldTransform = removeFieldTransform; + this.updateFieldTransform = updateFieldTransform; + this.breaking = breaking; + } + public static FieldTransform createAddFieldTransform(final List fieldName, final JsonNode schema) { return createAddFieldTransform(fieldName, new AddFieldTransform(schema)); } @@ -73,4 +82,33 @@ public boolean breaking() { return breaking; } + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + FieldTransform that = (FieldTransform) o; + return breaking == that.breaking && transformType == that.transformType && Objects.equals(fieldName, that.fieldName) + && Objects.equals(addFieldTransform, that.addFieldTransform) && Objects.equals(removeFieldTransform, + that.removeFieldTransform) + && Objects.equals(updateFieldTransform, that.updateFieldTransform); + } + + @Override + public int hashCode() { + return Objects.hash(transformType, fieldName, addFieldTransform, removeFieldTransform, updateFieldTransform, breaking); + } + + @Override + public String toString() { + return "FieldTransform{" + + "transformType=" + transformType + + ", fieldName=" + fieldName + + ", addFieldTransform=" + addFieldTransform + + ", removeFieldTransform=" + removeFieldTransform + + ", updateFieldTransform=" + updateFieldTransform + + ", breaking=" + breaking + + '}'; + } + } diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/FieldTransformType.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/FieldTransformType.java similarity index 80% rename from airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/FieldTransformType.java rename to airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/FieldTransformType.java index 84b58d6ab35..81503d45942 100644 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/FieldTransformType.java +++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/FieldTransformType.java @@ -2,7 +2,7 @@ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. */ -package io.airbyte.commons.protocol.transform_models; +package io.airbyte.commons.protocol.transformmodels; /** * Types of transformations possible for a field. 
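For the transform classes that stay in Java, the change is mechanical: the Lombok annotations are dropped and replaced with an explicit constructor, Objects-based equals and hashCode, and a hand-written toString, as in the FieldTransform hunk above. A minimal sketch of that de-Lombok pattern on a hypothetical ValueExample class (illustrative only, not part of this diff):

// Hypothetical example: explicit replacement for Lombok's
// @AllArgsConstructor/@EqualsAndHashCode/@ToString on a small value class.
import java.util.Objects;

public final class ValueExample {

  private final String name;
  private final boolean breaking;

  public ValueExample(final String name, final boolean breaking) {
    this.name = name;
    this.breaking = breaking;
  }

  public String getName() {
    return name;
  }

  public boolean isBreaking() {
    return breaking;
  }

  @Override
  public boolean equals(final Object o) {
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    final ValueExample that = (ValueExample) o;
    return breaking == that.breaking && Objects.equals(name, that.name);
  }

  @Override
  public int hashCode() {
    return Objects.hash(name, breaking);
  }

  @Override
  public String toString() {
    return "ValueExample{name=" + name + ", breaking=" + breaking + '}';
  }

}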
diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamAttributeTransformType.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamAttributeTransformType.java similarity index 78% rename from airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamAttributeTransformType.java rename to airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamAttributeTransformType.java index ced5d6cf9cb..335960b2504 100644 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamAttributeTransformType.java +++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamAttributeTransformType.java @@ -2,7 +2,7 @@ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. */ -package io.airbyte.commons.protocol.transform_models; +package io.airbyte.commons.protocol.transformmodels; /** * Types of possible stream attribute transformations. diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamTransform.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamTransform.java similarity index 55% rename from airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamTransform.java rename to airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamTransform.java index 9a831cb1d17..a09d21301a8 100644 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamTransform.java +++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamTransform.java @@ -2,25 +2,26 @@ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. */ -package io.airbyte.commons.protocol.transform_models; +package io.airbyte.commons.protocol.transformmodels; import io.airbyte.config.StreamDescriptor; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.ToString; +import java.util.Objects; /** * Represents the diff between two {@link io.airbyte.protocol.models.AirbyteStream}. 
*/ -@AllArgsConstructor -@EqualsAndHashCode -@ToString public final class StreamTransform { private final StreamTransformType transformType; private final StreamDescriptor streamDescriptor; private final UpdateStreamTransform updateStreamTransform; + public StreamTransform(StreamTransformType transformType, StreamDescriptor streamDescriptor, UpdateStreamTransform updateStreamTransform) { + this.transformType = transformType; + this.streamDescriptor = streamDescriptor; + this.updateStreamTransform = updateStreamTransform; + } + public static StreamTransform createAddStreamTransform(final StreamDescriptor streamDescriptor) { return new StreamTransform(StreamTransformType.ADD_STREAM, streamDescriptor, null); } @@ -46,4 +47,28 @@ public UpdateStreamTransform getUpdateStreamTransform() { return updateStreamTransform; } + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + StreamTransform that = (StreamTransform) o; + return transformType == that.transformType && Objects.equals(streamDescriptor, that.streamDescriptor) && Objects.equals( + updateStreamTransform, that.updateStreamTransform); + } + + @Override + public int hashCode() { + return Objects.hash(transformType, streamDescriptor, updateStreamTransform); + } + + @Override + public String toString() { + return "StreamTransform{" + + "transformType=" + transformType + + ", streamDescriptor=" + streamDescriptor + + ", updateStreamTransform=" + updateStreamTransform + + '}'; + } + } diff --git a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamTransformType.java b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamTransformType.java similarity index 79% rename from airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamTransformType.java rename to airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamTransformType.java index 812fef0ada5..89c6dde6e36 100644 --- a/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transform_models/StreamTransformType.java +++ b/airbyte-commons-protocol/src/main/java/io/airbyte/commons/protocol/transformmodels/StreamTransformType.java @@ -2,7 +2,7 @@ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. */ -package io.airbyte.commons.protocol.transform_models; +package io.airbyte.commons.protocol.transformmodels; /** * Types of transformations possible for a stream. 
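The remaining deleted value classes are re-created as Kotlin data classes in the hunks that follow, so equals/hashCode/toString come for free, while the static factory methods on the Java classes (e.g. StreamTransform.createAddStreamTransform) keep their signatures; call sites therefore only need the transform_models -> transformmodels import update. A rough usage sketch, assuming the generated io.airbyte.config.StreamDescriptor exposes the usual withName/withNamespace builders (class name and values here are illustrative):

// Hypothetical usage sketch: constructing a transform through a factory method
// that survives the package rename unchanged.
import io.airbyte.commons.protocol.transformmodels.StreamTransform;
import io.airbyte.config.StreamDescriptor;

public class TransformUsageExample {

  public static void main(final String[] args) {
    // Describe the stream that was added to the catalog.
    final StreamDescriptor descriptor = new StreamDescriptor().withName("users").withNamespace("public");
    // Same factory method as before the rename; only the import path changed.
    final StreamTransform addStream = StreamTransform.createAddStreamTransform(descriptor);
    // The hand-written toString replaces Lombok's @ToString output.
    System.out.println(addStream);
  }

}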
diff --git a/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/AddFieldTransform.kt b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/AddFieldTransform.kt new file mode 100644 index 00000000000..eeb7f8e9d93 --- /dev/null +++ b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/AddFieldTransform.kt @@ -0,0 +1,7 @@ +package io.airbyte.commons.protocol.transformmodels + +import com.fasterxml.jackson.databind.JsonNode + +data class AddFieldTransform( + val schema: JsonNode?, +) diff --git a/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/RemoveFieldTransform.kt b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/RemoveFieldTransform.kt new file mode 100644 index 00000000000..a075f27a614 --- /dev/null +++ b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/RemoveFieldTransform.kt @@ -0,0 +1,11 @@ +package io.airbyte.commons.protocol.transformmodels + +import com.fasterxml.jackson.databind.JsonNode + +/** + * Represents the removal of a field to an {@link io.airbyte.protocol.models.AirbyteStream}. + */ +data class RemoveFieldTransform( + val fieldName: List<String>?, + val schema: JsonNode?, +) diff --git a/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/StreamAttributeTransform.kt b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/StreamAttributeTransform.kt new file mode 100644 index 00000000000..aac15594e3d --- /dev/null +++ b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/StreamAttributeTransform.kt @@ -0,0 +1,24 @@ +package io.airbyte.commons.protocol.transformmodels + +/** + * Represents the diff between two fields. + */ +data class StreamAttributeTransform( + val transformType: StreamAttributeTransformType?, + val updatePrimaryKeyTransform: UpdateStreamAttributePrimaryKeyTransform?, + val breaking: Boolean?, +) { + companion object { + @JvmStatic + fun createUpdatePrimaryKeyTransform( + oldPrimaryKey: List<List<String>>?, + newPrimaryKey: List<List<String>>?, + breaking: Boolean?, + ): StreamAttributeTransform = + StreamAttributeTransform( + StreamAttributeTransformType.UPDATE_PRIMARY_KEY, + UpdateStreamAttributePrimaryKeyTransform(oldPrimaryKey, newPrimaryKey), + breaking, + ) + } +} diff --git a/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateFieldSchemaTransform.kt b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateFieldSchemaTransform.kt new file mode 100644 index 00000000000..cfffd6ace1b --- /dev/null +++ b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateFieldSchemaTransform.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.protocol.transformmodels + +import com.fasterxml.jackson.databind.JsonNode + +/** + * Represents the update of a field. 
+ */ +data class UpdateFieldSchemaTransform( + val oldSchema: JsonNode?, + val newSchema: JsonNode?, +) diff --git a/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateStreamAttributePrimaryKeyTransform.kt b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateStreamAttributePrimaryKeyTransform.kt new file mode 100644 index 00000000000..2550e4e167e --- /dev/null +++ b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateStreamAttributePrimaryKeyTransform.kt @@ -0,0 +1,6 @@ +package io.airbyte.commons.protocol.transformmodels + +data class UpdateStreamAttributePrimaryKeyTransform( + val oldPrimaryKey: List<List<String>>?, + val newPrimaryKey: List<List<String>>?, +) diff --git a/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateStreamTransform.kt b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateStreamTransform.kt new file mode 100644 index 00000000000..44e7629d84a --- /dev/null +++ b/airbyte-commons-protocol/src/main/kotlin/io/airbyte/commons/protocol/transformmodels/UpdateStreamTransform.kt @@ -0,0 +1,9 @@ +package io.airbyte.commons.protocol.transformmodels + +/** +* Represents the update of an [io.airbyte.protocol.models.AirbyteStream]. +*/ +data class UpdateStreamTransform( + val fieldTransforms: Set<FieldTransform>, + val attributeTransforms: Set<StreamAttributeTransform>, +) diff --git a/airbyte-commons-protocol/src/test/java/io/airbyte/commons/protocol/CatalogDiffHelpersTest.java b/airbyte-commons-protocol/src/test/java/io/airbyte/commons/protocol/CatalogDiffHelpersTest.java index fdaa5fccd6c..330fd9982f7 100644 --- a/airbyte-commons-protocol/src/test/java/io/airbyte/commons/protocol/CatalogDiffHelpersTest.java +++ b/airbyte-commons-protocol/src/test/java/io/airbyte/commons/protocol/CatalogDiffHelpersTest.java @@ -8,12 +8,12 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.io.Resources; -import io.airbyte.commons.protocol.transform_models.FieldTransform; -import io.airbyte.commons.protocol.transform_models.StreamAttributeTransform; -import io.airbyte.commons.protocol.transform_models.StreamTransform; -import io.airbyte.commons.protocol.transform_models.StreamTransformType; -import io.airbyte.commons.protocol.transform_models.UpdateFieldSchemaTransform; -import io.airbyte.commons.protocol.transform_models.UpdateStreamTransform; +import io.airbyte.commons.protocol.transformmodels.FieldTransform; +import io.airbyte.commons.protocol.transformmodels.StreamAttributeTransform; +import io.airbyte.commons.protocol.transformmodels.StreamTransform; +import io.airbyte.commons.protocol.transformmodels.StreamTransformType; +import io.airbyte.commons.protocol.transformmodels.UpdateFieldSchemaTransform; +import io.airbyte.commons.protocol.transformmodels.UpdateStreamTransform; import io.airbyte.config.AirbyteStream; import io.airbyte.config.ConfiguredAirbyteCatalog; import io.airbyte.config.ConfiguredAirbyteStream; diff --git a/airbyte-commons-server/build.gradle.kts b/airbyte-commons-server/build.gradle.kts index beed1861bf0..11b7e696d8b 100644 --- a/airbyte-commons-server/build.gradle.kts +++ b/airbyte-commons-server/build.gradle.kts @@ -4,20 +4,14 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + annotationProcessor(platform(libs.micronaut.platform)) + annotationProcessor(libs.bundles.micronaut.annotation.processor) + 
annotationProcessor(libs.micronaut.jaxrs.processor) ksp(platform(libs.micronaut.platform)) ksp(libs.bundles.micronaut.annotation.processor) ksp(libs.micronaut.jaxrs.processor) - kspTest(platform(libs.micronaut.platform)) - kspTest(libs.bundles.micronaut.test.annotation.processor) - - annotationProcessor(platform(libs.micronaut.platform)) - annotationProcessor(libs.bundles.micronaut.annotation.processor) - annotationProcessor(libs.micronaut.jaxrs.processor) - implementation(platform(libs.micronaut.platform)) implementation(libs.bundles.micronaut) implementation(libs.bundles.datadog) @@ -43,6 +37,7 @@ dependencies { implementation(libs.jakarta.ws.rs.api) implementation(libs.kubernetes.client) implementation(libs.guava) + implementation(libs.cron.utils) implementation(project(":oss:airbyte-analytics")) implementation(project(":oss:airbyte-api:connector-builder-api")) @@ -75,9 +70,13 @@ dependencies { implementation(project(":oss:airbyte-persistence:job-persistence")) implementation(project(":oss:airbyte-worker-models")) implementation(project(":oss:airbyte-notification")) + implementation(project(":oss:airbyte-csp-check")) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + kspTest(platform(libs.micronaut.platform)) + kspTest(libs.bundles.micronaut.test.annotation.processor) + testImplementation(project(":oss:airbyte-test-utils")) testImplementation("org.jetbrains.kotlin:kotlin-reflect") testImplementation(libs.postgresql) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/authorization/KeycloakTokenValidator.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/authorization/KeycloakTokenValidator.java index 68177aaf8b6..82a0df5710e 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/authorization/KeycloakTokenValidator.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/authorization/KeycloakTokenValidator.java @@ -27,28 +27,31 @@ import io.micronaut.security.token.validator.TokenValidator; import jakarta.inject.Named; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Map; import java.util.Optional; -import lombok.extern.slf4j.Slf4j; import okhttp3.OkHttpClient; import okhttp3.Request; import okhttp3.Response; import org.reactivestreams.Publisher; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import reactor.core.publisher.Mono; /** * Token Validator for Airbyte Cloud and Enterprise. Performs an online validation of the token * against the Keycloak server. 
*/ -@Slf4j @Singleton @RequiresAuthMode(AuthMode.OIDC) @SuppressWarnings({"PMD.PreserveStackTrace", "PMD.UseTryWithResources", "PMD.UnusedFormalParameter", "PMD.UnusedPrivateMethod", "PMD.ExceptionAsFlowControl"}) public class KeycloakTokenValidator implements TokenValidator> { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String EXTERNAL_USER = "external-user"; private static final String INTERNAL_SERVICE_ACCOUNT = "internal-service-account"; private static final MetricAttribute AUTHENTICATION_FAILURE_METRIC_ATTRIBUTE = new MetricAttribute(MetricTags.AUTHENTICATION_RESPONSE, "failure"); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/CatalogDiffConverters.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/CatalogDiffConverters.java index 2a524aa8021..c8c4318ed35 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/CatalogDiffConverters.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/CatalogDiffConverters.java @@ -14,9 +14,9 @@ import io.airbyte.api.model.generated.StreamTransformUpdateStream; import io.airbyte.commons.converters.ApiConverters; import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.protocol.transform_models.FieldTransformType; -import io.airbyte.commons.protocol.transform_models.StreamAttributeTransformType; -import io.airbyte.commons.protocol.transform_models.StreamTransformType; +import io.airbyte.commons.protocol.transformmodels.FieldTransformType; +import io.airbyte.commons.protocol.transformmodels.StreamAttributeTransformType; +import io.airbyte.commons.protocol.transformmodels.StreamTransformType; import java.util.Optional; /** @@ -24,7 +24,7 @@ */ public class CatalogDiffConverters { - public static StreamTransform streamTransformToApi(final io.airbyte.commons.protocol.transform_models.StreamTransform transform) { + public static StreamTransform streamTransformToApi(final io.airbyte.commons.protocol.transformmodels.StreamTransform transform) { return new StreamTransform() .transformType(Enums.convertTo(transform.getTransformType(), StreamTransform.TransformTypeEnum.class)) .streamDescriptor(ApiConverters.toApi(transform.getStreamDescriptor())) @@ -32,7 +32,7 @@ public static StreamTransform streamTransformToApi(final io.airbyte.commons.prot } @SuppressWarnings("LineLength") - public static Optional updateStreamToApi(final io.airbyte.commons.protocol.transform_models.StreamTransform transform) { + public static Optional updateStreamToApi(final io.airbyte.commons.protocol.transformmodels.StreamTransform transform) { if (transform.getTransformType() == StreamTransformType.UPDATE_STREAM) { return Optional.of(new StreamTransformUpdateStream() .streamAttributeTransforms(transform.getUpdateStreamTransform() @@ -51,14 +51,14 @@ public static Optional updateStreamToApi(final io.a } @SuppressWarnings("LineLength") - public static StreamAttributeTransform streamAttributeTransformToApi(final io.airbyte.commons.protocol.transform_models.StreamAttributeTransform transform) { + public static StreamAttributeTransform streamAttributeTransformToApi(final io.airbyte.commons.protocol.transformmodels.StreamAttributeTransform transform) { return new StreamAttributeTransform() .transformType(Enums.convertTo(transform.getTransformType(), StreamAttributeTransform.TransformTypeEnum.class)) - .breaking(transform.isBreaking()) + .breaking(transform.getBreaking()) 
.updatePrimaryKey(updatePrimaryKeyToApi(transform).orElse(null)); } - public static FieldTransform fieldTransformToApi(final io.airbyte.commons.protocol.transform_models.FieldTransform transform) { + public static FieldTransform fieldTransformToApi(final io.airbyte.commons.protocol.transformmodels.FieldTransform transform) { return new FieldTransform() .transformType(Enums.convertTo(transform.getTransformType(), FieldTransform.TransformTypeEnum.class)) .fieldName(transform.getFieldName()) @@ -69,7 +69,7 @@ public static FieldTransform fieldTransformToApi(final io.airbyte.commons.protoc } @SuppressWarnings("LineLength") - private static Optional updatePrimaryKeyToApi(final io.airbyte.commons.protocol.transform_models.StreamAttributeTransform transform) { + private static Optional updatePrimaryKeyToApi(final io.airbyte.commons.protocol.transformmodels.StreamAttributeTransform transform) { if (transform.getTransformType() == StreamAttributeTransformType.UPDATE_PRIMARY_KEY) { return Optional.of(new StreamAttributePrimaryKeyUpdate() .newPrimaryKey(transform.getUpdatePrimaryKeyTransform().getNewPrimaryKey()) @@ -79,7 +79,7 @@ private static Optional updatePrimaryKeyToApi(f } } - private static Optional addFieldToApi(final io.airbyte.commons.protocol.transform_models.FieldTransform transform) { + private static Optional addFieldToApi(final io.airbyte.commons.protocol.transformmodels.FieldTransform transform) { if (transform.getTransformType() == FieldTransformType.ADD_FIELD) { return Optional.of(new FieldAdd() .schema(transform.getAddFieldTransform().getSchema())); @@ -88,7 +88,7 @@ private static Optional addFieldToApi(final io.airbyte.commons.protoco } } - private static Optional removeFieldToApi(final io.airbyte.commons.protocol.transform_models.FieldTransform transform) { + private static Optional removeFieldToApi(final io.airbyte.commons.protocol.transformmodels.FieldTransform transform) { if (transform.getTransformType() == FieldTransformType.REMOVE_FIELD) { return Optional.of(new FieldRemove() .schema(transform.getRemoveFieldTransform().getSchema())); @@ -97,7 +97,7 @@ private static Optional removeFieldToApi(final io.airbyte.commons.p } } - private static Optional updateFieldToApi(final io.airbyte.commons.protocol.transform_models.FieldTransform transform) { + private static Optional updateFieldToApi(final io.airbyte.commons.protocol.transformmodels.FieldTransform transform) { if (transform.getTransformType() == FieldTransformType.UPDATE_FIELD_SCHEMA) { return Optional.of(new FieldSchemaUpdate() .oldSchema(transform.getUpdateFieldTransform().getOldSchema()) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java index 03847ffc202..7d94afc5b4f 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/converters/JobConverter.java @@ -52,10 +52,6 @@ import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.StreamSyncStats; import io.airbyte.config.SyncStats; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.StructuredLogs; -import io.airbyte.featureflag.Workspace; -import io.airbyte.persistence.job.WorkspaceHelper; import io.micronaut.core.util.CollectionUtils; import jakarta.annotation.Nullable; import jakarta.inject.Singleton; @@ -65,12 +61,9 @@ import java.util.List; import java.util.Locale; 
import java.util.Optional; -import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * Convert between API and internal versions of job models. @@ -78,24 +71,14 @@ @Singleton public class JobConverter { - private static final Logger LOGGER = LoggerFactory.getLogger(JobConverter.class); - - private final FeatureFlagClient featureFlagClient; - private final LogClientManager logClientManager; private final LogUtils logUtils; - private final WorkspaceHelper workspaceHelper; - - public JobConverter(final FeatureFlagClient featureFlagClient, - final LogClientManager logClientManager, - final LogUtils logUtils, - final WorkspaceHelper workspaceHelper) { - this.featureFlagClient = featureFlagClient; + public JobConverter(final LogClientManager logClientManager, + final LogUtils logUtils) { this.logClientManager = logClientManager; this.logUtils = logUtils; - this.workspaceHelper = workspaceHelper; } public JobInfoRead getJobInfoRead(final Job job) { @@ -320,13 +303,12 @@ public LogRead getLogRead(final Path logPath) { } public AttemptInfoReadLogs getAttemptLogs(final Path logPath, final Long jobId) { - if (featureFlagClient.boolVariation(StructuredLogs.INSTANCE, new Workspace(getWorkspaceId(jobId)))) { - final LogEvents logEvents = logClientManager.getLogs(logPath); - if (CollectionUtils.isNotEmpty(logEvents.getEvents())) { - return new AttemptInfoReadLogs().events(toModelLogEvents(logEvents.getEvents(), logUtils)).version(logEvents.getVersion()); - } + final LogEvents logEvents = logClientManager.getLogs(logPath); + if (CollectionUtils.isNotEmpty(logEvents.getEvents())) { + return new AttemptInfoReadLogs().events(toModelLogEvents(logEvents.getEvents(), logUtils)).version(logEvents.getVersion()); + } else { + return new AttemptInfoReadLogs().logLines(getLogRead(logPath).getLogLines()); } - return new AttemptInfoReadLogs().logLines(getLogRead(logPath).getLogLines()); } private static FailureReason getFailureReason(final @Nullable io.airbyte.config.FailureReason failureReason, final long defaultTimestamp) { @@ -379,7 +361,7 @@ private static List toModelLogEvents(final List optionalLastJob = jobPersistence.getLastReplicationJob(connectionId); - final Optional optionalFirstJob = jobPersistence.getFirstReplicationJob(connectionId); - - if (optionalLastJob.isEmpty()) { - LOGGER.error("Auto-Disable Connection should not have been attempted if can't get latest replication job."); - return new InternalOperationResult().succeeded(false); - } - - if (optionalFirstJob.isEmpty()) { - LOGGER.error("Auto-Disable Connection should not have been attempted if no replication job has been run."); - return new InternalOperationResult().succeeded(false); - } - - final List jobs = jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, - REPLICATION_TYPES, timestamp.minus(maxDaysOfOnlyFailedJobs, DAYS)); - - int numFailures = 0; - Optional successTimestamp = Optional.empty(); - - for (final JobWithStatusAndTimestamp job : jobs) { - final JobStatus jobStatus = job.getStatus(); - if (jobStatus == JobStatus.FAILED) { - numFailures++; - } else if (jobStatus == JobStatus.SUCCEEDED) { - successTimestamp = Optional.of(job.getUpdatedAtInSecond()); - break; - } - } - - final boolean warningPreviouslySentForMaxDays = - warningPreviouslySentForMaxDays(numFailures, successTimestamp, maxDaysOfOnlyFailedJobsBeforeWarning, optionalFirstJob.get(), jobs); - final List attemptStats = new 
ArrayList<>(); - for (final Attempt attempt : optionalLastJob.get().getAttempts()) { - attemptStats.add(jobPersistence.getAttemptStats(optionalLastJob.get().getId(), attempt.getAttemptNumber())); - } - if (numFailures == 0) { - return new InternalOperationResult().succeeded(false); - } else if (numFailures >= maxFailedJobsInARowBeforeConnectionDisable) { - // disable connection if max consecutive failed jobs limit has been hit - autoDisableConnection(standardSync, optionalLastJob.get(), - ConnectionAutoDisabledReason.TOO_MANY_CONSECUTIVE_FAILED_JOBS_IN_A_ROW); - return new InternalOperationResult().succeeded(true); - } else if (numFailures == maxFailedJobsInARowBeforeConnectionDisableWarning && !warningPreviouslySentForMaxDays) { - // warn if number of consecutive failures hits 50% of MaxFailedJobsInARow - jobNotifier.autoDisableConnectionWarning(optionalLastJob.get(), attemptStats); - return new InternalOperationResult().succeeded(false); - } - - // calculate the number of days this connection first tried a replication job, used to ensure not to - // disable or warn for `maxDaysOfOnlyFailedJobs` if the first job is younger than - // `maxDaysOfOnlyFailedJobs` days, This avoids cases such as "the very first job run was a failure". - final int numDaysSinceFirstReplicationJob = getDaysSinceTimestamp(currTimestampInSeconds, optionalFirstJob.get().getCreatedAtInSecond()); - final boolean firstReplicationOlderThanMaxDisableDays = numDaysSinceFirstReplicationJob >= maxDaysOfOnlyFailedJobs; - final boolean noPreviousSuccess = successTimestamp.isEmpty(); - - // disable connection if only failed jobs in the past maxDaysOfOnlyFailedJobs days - if (firstReplicationOlderThanMaxDisableDays && noPreviousSuccess) { - autoDisableConnection(standardSync, optionalLastJob.get(), ConnectionAutoDisabledReason.ONLY_FAILED_JOBS_RECENTLY); - return new InternalOperationResult().succeeded(true); - } - - // skip warning if previously sent - if (warningPreviouslySentForMaxDays || numFailures > maxFailedJobsInARowBeforeConnectionDisableWarning) { - LOGGER.info("Warning was previously sent for connection: {}", connectionId); - return new InternalOperationResult().succeeded(false); - } - - final boolean firstReplicationOlderThanMaxDisableWarningDays = numDaysSinceFirstReplicationJob >= maxDaysOfOnlyFailedJobsBeforeWarning; - final boolean successOlderThanPrevFailureByMaxWarningDays = // set to true if no previous success is found - noPreviousSuccess || getDaysSinceTimestamp(currTimestampInSeconds, successTimestamp.get()) >= maxDaysOfOnlyFailedJobsBeforeWarning; - - // send warning if there are only failed jobs in the past maxDaysOfOnlyFailedJobsBeforeWarning days - // _unless_ a warning should have already been sent in the previous failure - if (firstReplicationOlderThanMaxDisableWarningDays && successOlderThanPrevFailureByMaxWarningDays) { - - jobNotifier.autoDisableConnectionWarning(optionalLastJob.get(), attemptStats); - } - return new InternalOperationResult().succeeded(false); - } - - private void autoDisableConnection(final StandardSync standardSync, final Job lastJob, final ConnectionAutoDisabledReason disabledReason) - throws IOException { - // apply patch to connection - standardSync.setStatus(Status.INACTIVE); - connectionService.writeStandardSync(standardSync); - // log connection disabled event in connection timeline - connectionTimelineEventHelper.logStatusChangedEventInConnectionTimeline(standardSync.getConnectionId(), ConnectionStatus.INACTIVE, - disabledReason.name(), true); - - final List attemptStats = 
new ArrayList<>(); - for (final Attempt attempt : lastJob.getAttempts()) { - attemptStats.add(jobPersistence.getAttemptStats(lastJob.getId(), attempt.getAttemptNumber())); - } - jobNotifier.autoDisableConnection(lastJob, attemptStats); - } - - private int getDaysSinceTimestamp(final long currentTimestampInSeconds, final long timestampInSeconds) { - return Math.toIntExact(TimeUnit.SECONDS.toDays(currentTimestampInSeconds - timestampInSeconds)); - } - - // Checks to see if warning should have been sent in the previous failure, if so skip sending of - // warning to avoid spam - // Assume warning has been sent if either of the following is true: - // 1. no success found in the time span and the previous failure occurred - // maxDaysOfOnlyFailedJobsBeforeWarning days after the first job - // 2. success found and the previous failure occurred maxDaysOfOnlyFailedJobsBeforeWarning days - // after that success - private boolean warningPreviouslySentForMaxDays(final int numFailures, - final Optional successTimestamp, - final int maxDaysOfOnlyFailedJobsBeforeWarning, - final Job firstJob, - final List jobs) { - // no previous warning sent if there was no previous failure - if (numFailures <= 1 || jobs.size() <= 1) { - return false; - } - - // get previous failed job (skipping first job since that's considered "current" job) - JobWithStatusAndTimestamp prevFailedJob = jobs.get(1); - for (int i = 2; i < jobs.size(); i++) { - if (prevFailedJob.getStatus() == JobStatus.FAILED) { - break; + private void assignIdsToIncomingMappers(final AirbyteCatalog catalog) { + catalog.getStreams().forEach(stream -> stream.getConfig().getMappers().forEach(mapper -> { + if (mapper.getId() == null) { + mapper.setId(uuidGenerator.get()); } - prevFailedJob = jobs.get(i); - } - - final boolean successExists = successTimestamp.isPresent(); - boolean successOlderThanPrevFailureByMaxWarningDays = false; - if (successExists) { - successOlderThanPrevFailureByMaxWarningDays = - getDaysSinceTimestamp(prevFailedJob.getUpdatedAtInSecond(), successTimestamp.get()) >= maxDaysOfOnlyFailedJobsBeforeWarning; - } - final boolean prevFailureOlderThanFirstJobByMaxWarningDays = - getDaysSinceTimestamp(prevFailedJob.getUpdatedAtInSecond(), firstJob.getUpdatedAtInSecond()) >= maxDaysOfOnlyFailedJobsBeforeWarning; - - return (successExists && successOlderThanPrevFailureByMaxWarningDays) - || (!successExists && prevFailureOlderThanFirstJobByMaxWarningDays); + })); } private void validateConfiguredMappers(final ConfiguredAirbyteCatalog configuredCatalog) { @@ -563,6 +407,7 @@ private void validateConfiguredMappers(final ConfiguredAirbyteCatalog configured .error(mapperError.getValue().getType().name()) .mapper( new ProblemMapperErrorDataMapper() + .id(mapperError.getKey().id()) .type(mapperError.getKey().name()) .mapperConfiguration(mapperError.getKey().config())))) .toList(); @@ -625,10 +470,13 @@ public ConnectionRead createConnection(final ConnectionCreate connectionCreate) validateCatalogDoesntContainDuplicateStreamNames(connectionCreate.getSyncCatalog()); validateCatalogSize(connectionCreate.getSyncCatalog(), workspaceId, "create"); + assignIdsToIncomingMappers(connectionCreate.getSyncCatalog()); final ConfiguredAirbyteCatalog configuredCatalog = catalogConverter.toConfiguredInternal(connectionCreate.getSyncCatalog()); validateConfiguredMappers(configuredCatalog); - standardSync.withCatalog(configuredCatalog); + + final ConfiguredAirbyteCatalog configuredCatalogNoSecrets = mapperSecretHelper.createAndReplaceMapperSecrets(workspaceId, 
configuredCatalog); + standardSync.withCatalog(configuredCatalogNoSecrets); standardSync.withFieldSelectionData(catalogConverter.getFieldSelectionData(connectionCreate.getSyncCatalog())); } else { standardSync.withCatalog(new ConfiguredAirbyteCatalog().withStreams(Collections.emptyList())); @@ -843,7 +691,6 @@ private void validateConnectionPatch(final WorkspaceHelper workspaceHelper, fina case CRON -> Preconditions.checkArgument( patch.getScheduleData() != null, "ConnectionUpdate should include scheduleData when setting the Connection scheduleType to CRON."); - // shouldn't be possible to reach this case default -> throw new RuntimeException("Unrecognized scheduleType!"); } @@ -967,19 +814,7 @@ private boolean haveConfigChange(final AirbyteStreamConfiguration oldConfig, fin final Set> convertedNewPrimaryKey = new HashSet<>(newConfig.getPrimaryKey()); final boolean hasPrimaryKeyChanged = !(convertedOldPrimaryKey.equals(convertedNewPrimaryKey)); - // TODO(pedro): This should be checked by generating the destination catalog to support all mappers - final List oldHashedFields = - oldConfig.getHashedFields() == null ? new ArrayList() : new ArrayList(oldConfig.getHashedFields()); - final List newHashedFields = - newConfig.getHashedFields() == null ? new ArrayList() : new ArrayList(newConfig.getHashedFields()); - - final Comparator fieldPathComparator = Comparator.comparing( - field -> String.join(".", field.getFieldPath())); - oldHashedFields.sort(fieldPathComparator); - newHashedFields.sort(fieldPathComparator); - final boolean hasHashedFieldsChanged = !oldHashedFields.equals(newHashedFields); - - return hasCursorChanged || hasSyncModeChanged || hasDestinationSyncModeChanged || hasPrimaryKeyChanged || hasHashedFieldsChanged; + return hasCursorChanged || hasSyncModeChanged || hasDestinationSyncModeChanged || hasPrimaryKeyChanged; } private Map catalogToPerStreamConfiguration(final AirbyteCatalog catalog) { @@ -989,6 +824,7 @@ private Map catalogToPerStreamConf AirbyteStreamAndConfiguration::getConfig)); } + @Trace public Optional getConnectionAirbyteCatalog(final UUID connectionId) throws JsonValidationException, ConfigNotFoundException, IOException { final StandardSync connection = connectionService.getStandardSync(connectionId); @@ -1025,6 +861,10 @@ public void deleteConnection(final UUID connectionId) throws JsonValidationExcep public ConnectionRead buildConnectionRead(final UUID connectionId) throws ConfigNotFoundException, IOException, JsonValidationException { final StandardSync standardSync = connectionService.getStandardSync(connectionId); + + final ConfiguredAirbyteCatalog maskedCatalog = mapperSecretHelper.maskMapperSecrets(standardSync.getCatalog()); + standardSync.setCatalog(maskedCatalog); + return apiPojoConverters.internalToConnectionRead(standardSync); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java index 721d00461bb..43ea1e4f242 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandler.java @@ -5,12 +5,17 @@ package io.airbyte.commons.server.handlers; import static io.airbyte.commons.version.AirbyteProtocolVersion.DEFAULT_AIRBYTE_PROTOCOL_VERSION; +import static 
io.airbyte.metrics.lib.ApmTraceConstants.Tags.CONNECTOR_BUILDER_PROJECT_ID_KEY; +import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.WORKSPACE_ID_KEY; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import io.airbyte.api.model.generated.BaseActorDefinitionVersionInfo; import io.airbyte.api.model.generated.BuilderProjectForDefinitionRequestBody; import io.airbyte.api.model.generated.BuilderProjectForDefinitionResponse; +import io.airbyte.api.model.generated.BuilderProjectOauthConsentRequest; +import io.airbyte.api.model.generated.CompleteConnectorBuilderProjectOauthRequest; +import io.airbyte.api.model.generated.CompleteOAuthResponse; import io.airbyte.api.model.generated.ConnectorBuilderHttpRequest; import io.airbyte.api.model.generated.ConnectorBuilderHttpResponse; import io.airbyte.api.model.generated.ConnectorBuilderProjectDetails; @@ -33,12 +38,14 @@ import io.airbyte.api.model.generated.DeclarativeManifestRead; import io.airbyte.api.model.generated.DeclarativeManifestRequestBody; import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.api.model.generated.OAuthConsentRead; import io.airbyte.api.model.generated.SourceDefinitionIdBody; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.server.errors.NotFoundException; import io.airbyte.commons.server.handlers.helpers.BuilderProjectUpdater; import io.airbyte.commons.server.handlers.helpers.DeclarativeSourceManifestInjector; +import io.airbyte.commons.server.handlers.helpers.OAuthHelper; import io.airbyte.commons.version.Version; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ConfigSchema; @@ -72,7 +79,11 @@ import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Organization; import io.airbyte.featureflag.UseRuntimeSecretPersistence; +import io.airbyte.metrics.lib.ApmTraceUtils; +import io.airbyte.oauth.OAuthImplementationFactory; +import io.airbyte.oauth.declarative.DeclarativeOAuthFlow; import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.protocol.models.OAuthConfigSpecification; import io.airbyte.validation.json.JsonValidationException; import io.micronaut.core.util.CollectionUtils; import jakarta.annotation.Nullable; @@ -117,6 +128,7 @@ public class ConnectorBuilderProjectsHandler { private final ConnectorBuilderServerApi connectorBuilderServerApiClient; private final ActorDefinitionService actorDefinitionService; private final RemoteDefinitionsProvider remoteDefinitionsProvider; + private final OAuthImplementationFactory oAuthImplementationFactory; public static final String SPEC_FIELD = "spec"; public static final String CONNECTION_SPECIFICATION_FIELD = "connection_specification"; @@ -136,7 +148,8 @@ public ConnectorBuilderProjectsHandler(final DeclarativeManifestImageVersionServ @Named("jsonSecretsProcessorWithCopy") final JsonSecretsProcessor secretsProcessor, final ConnectorBuilderServerApi connectorBuilderServerApiClient, final ActorDefinitionService actorDefinitionService, - final RemoteDefinitionsProvider remoteDefinitionsProvider) { + final RemoteDefinitionsProvider remoteDefinitionsProvider, + @Named("oauthImplementationFactory") final OAuthImplementationFactory oauthImplementationFactory) { this.declarativeManifestImageVersionService = declarativeManifestImageVersionService; this.connectorBuilderService = connectorBuilderService; this.buildProjectUpdater = 
builderProjectUpdater; @@ -152,6 +165,7 @@ public ConnectorBuilderProjectsHandler(final DeclarativeManifestImageVersionServ this.connectorBuilderServerApiClient = connectorBuilderServerApiClient; this.actorDefinitionService = actorDefinitionService; this.remoteDefinitionsProvider = remoteDefinitionsProvider; + this.oAuthImplementationFactory = oauthImplementationFactory; } private ConnectorBuilderProjectDetailsRead getProjectDetailsWithoutBaseAdvInfo(final ConnectorBuilderProject project) { @@ -422,7 +436,6 @@ private UUID createActorDefinition(final String name, final UUID workspaceId, fi return source.getSourceDefinitionId(); } - @SuppressWarnings("PMD.PreserveStackTrace") public JsonNode updateConnectorBuilderProjectTestingValues(final ConnectorBuilderProjectTestingValuesUpdate testingValuesUpdate) throws ConfigNotFoundException, IOException, JsonValidationException { try { @@ -611,4 +624,63 @@ public ConnectorBuilderProjectIdWithWorkspaceId createForkedConnectorBuilderProj new ConnectorBuilderProjectWithWorkspaceId().workspaceId(requestBody.getWorkspaceId()).builderProject(projectDetails)); } + public OAuthConsentRead getConnectorBuilderProjectOAuthConsent(final BuilderProjectOauthConsentRequest requestBody) + throws JsonValidationException, ConfigNotFoundException, IOException { + + final ConnectorBuilderProject project = connectorBuilderService.getConnectorBuilderProject(requestBody.getBuilderProjectId(), true); + final ConnectorSpecification spec = Jsons.object(project.getManifestDraft().get("spec"), ConnectorSpecification.class); + + final Optional secretPersistenceConfig = getSecretPersistenceConfig(project.getWorkspaceId()); + final JsonNode existingHydratedTestingValues = + getHydratedTestingValues(project, secretPersistenceConfig.orElse(null)).orElse(Jsons.emptyObject()); + + final Map traceTags = Map.of(WORKSPACE_ID_KEY, requestBody.getWorkspaceId(), CONNECTOR_BUILDER_PROJECT_ID_KEY, + requestBody.getBuilderProjectId()); + ApmTraceUtils.addTagsToTrace(traceTags); + ApmTraceUtils.addTagsToRootSpan(traceTags); + + final OAuthConfigSpecification oauthConfigSpecification = spec.getAdvancedAuth().getOauthConfigSpecification(); + OAuthHelper.updateOauthConfigToAcceptAdditionalUserInputProperties(oauthConfigSpecification); + + final DeclarativeOAuthFlow oAuthFlowImplementation = oAuthImplementationFactory.createDeclarativeOAuthImplementation(spec); + return new OAuthConsentRead().consentUrl(oAuthFlowImplementation.getSourceConsentUrl( + requestBody.getWorkspaceId(), + null, + requestBody.getRedirectUrl(), + existingHydratedTestingValues, + oauthConfigSpecification, + existingHydratedTestingValues)); + } + + public CompleteOAuthResponse completeConnectorBuilderProjectOAuth(final CompleteConnectorBuilderProjectOauthRequest requestBody) + throws JsonValidationException, ConfigNotFoundException, IOException { + + final ConnectorBuilderProject project = connectorBuilderService.getConnectorBuilderProject(requestBody.getBuilderProjectId(), true); + final ConnectorSpecification spec = Jsons.object(project.getManifestDraft().get("spec"), ConnectorSpecification.class); + + final Optional secretPersistenceConfig = getSecretPersistenceConfig(project.getWorkspaceId()); + final JsonNode existingHydratedTestingValues = + getHydratedTestingValues(project, secretPersistenceConfig.orElse(null)).orElse(Jsons.emptyObject()); + + final Map traceTags = Map.of(WORKSPACE_ID_KEY, requestBody.getWorkspaceId(), CONNECTOR_BUILDER_PROJECT_ID_KEY, + requestBody.getBuilderProjectId()); + 
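The consent and completion endpoints added to ConnectorBuilderProjectsHandler mirror the regular source OAuth flow, except that the spec is read from the project's draft manifest and the hydrated testing values are reused as the OAuth input configuration. A rough caller-side sketch, assuming the generated request models expose fluent setters matching the getters the handler calls; the workspace id, project id, redirect URL, and callback parameters below are placeholders:

    import io.airbyte.api.model.generated.BuilderProjectOauthConsentRequest;
    import io.airbyte.api.model.generated.CompleteConnectorBuilderProjectOauthRequest;
    import io.airbyte.api.model.generated.CompleteOAuthResponse;
    import io.airbyte.api.model.generated.OAuthConsentRead;
    import io.airbyte.commons.server.handlers.ConnectorBuilderProjectsHandler;
    import java.util.Map;
    import java.util.UUID;

    // Illustrative only: drives the new consent/complete endpoints end-to-end.
    final class BuilderProjectOAuthExample {

      static CompleteOAuthResponse runOAuth(final ConnectorBuilderProjectsHandler handler,
                                            final UUID workspaceId,
                                            final UUID projectId)
          throws Exception {
        final String redirectUrl = "https://example.test/oauth/callback"; // placeholder
        final OAuthConsentRead consent = handler.getConnectorBuilderProjectOAuthConsent(
            new BuilderProjectOauthConsentRequest()
                .workspaceId(workspaceId)
                .builderProjectId(projectId)
                .redirectUrl(redirectUrl));
        // The user is sent to consent.getConsentUrl(); the provider redirects back with query
        // parameters, which are then exchanged for credentials. The map shape is an assumption.
        return handler.completeConnectorBuilderProjectOAuth(
            new CompleteConnectorBuilderProjectOauthRequest()
                .workspaceId(workspaceId)
                .builderProjectId(projectId)
                .redirectUrl(redirectUrl)
                .queryParams(Map.of("code", "placeholder-auth-code")));
      }

    }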
ApmTraceUtils.addTagsToTrace(traceTags); + ApmTraceUtils.addTagsToRootSpan(traceTags); + + final OAuthConfigSpecification oauthConfigSpecification = spec.getAdvancedAuth().getOauthConfigSpecification(); + OAuthHelper.updateOauthConfigToAcceptAdditionalUserInputProperties(oauthConfigSpecification); + + final DeclarativeOAuthFlow oAuthFlowImplementation = oAuthImplementationFactory.createDeclarativeOAuthImplementation(spec); + final Map result = oAuthFlowImplementation.completeSourceOAuth( + requestBody.getWorkspaceId(), + null, + requestBody.getQueryParams(), + requestBody.getRedirectUrl(), + existingHydratedTestingValues, + oauthConfigSpecification, + existingHydratedTestingValues); + + return OAuthHelper.mapToCompleteOAuthResponse(result); + } + } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorDefinitionSpecificationHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorDefinitionSpecificationHandler.java index 3735ca359e3..27e68fb112c 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorDefinitionSpecificationHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectorDefinitionSpecificationHandler.java @@ -4,6 +4,7 @@ package io.airbyte.commons.server.handlers; +import com.google.common.annotations.VisibleForTesting; import io.airbyte.api.model.generated.AdvancedAuth; import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; @@ -17,13 +18,16 @@ import io.airbyte.commons.server.scheduler.SynchronousJobMetadata; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.DestinationConnection; +import io.airbyte.config.DestinationOAuthParameter; import io.airbyte.config.JobConfig; import io.airbyte.config.SourceConnection; +import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.data.exceptions.ConfigNotFoundException; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.OAuthService; import io.airbyte.data.services.SourceService; import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Singleton; @@ -45,15 +49,18 @@ public class ConnectorDefinitionSpecificationHandler { private final JobConverter jobConverter; private final SourceService sourceService; private final DestinationService destinationService; + private final OAuthService oAuthService; public ConnectorDefinitionSpecificationHandler(final ActorDefinitionVersionHelper actorDefinitionVersionHelper, final JobConverter jobConverter, final SourceService sourceService, - final DestinationService destinationService) { + final DestinationService destinationService, + final OAuthService oauthService) { this.actorDefinitionVersionHelper = actorDefinitionVersionHelper; this.jobConverter = jobConverter; this.sourceService = sourceService; this.destinationService = destinationService; + this.oAuthService = oauthService; } /** @@ -73,7 +80,7 @@ public SourceDefinitionSpecificationRead getSpecificationForSourceId(final Sourc actorDefinitionVersionHelper.getSourceVersion(sourceDefinition, source.getWorkspaceId(), sourceIdRequestBody.getSourceId()); final io.airbyte.protocol.models.ConnectorSpecification spec = 
sourceVersion.getSpec(); - return getSourceSpecificationRead(sourceDefinition, spec); + return getSourceSpecificationRead(sourceDefinition, spec, source.getWorkspaceId()); } /** @@ -93,7 +100,7 @@ public SourceDefinitionSpecificationRead getSourceDefinitionSpecification(final actorDefinitionVersionHelper.getSourceVersion(source, sourceDefinitionIdWithWorkspaceId.getWorkspaceId()); final io.airbyte.protocol.models.ConnectorSpecification spec = sourceVersion.getSpec(); - return getSourceSpecificationRead(source, spec); + return getSourceSpecificationRead(source, spec, sourceDefinitionIdWithWorkspaceId.getWorkspaceId()); } /** @@ -114,7 +121,7 @@ public DestinationDefinitionSpecificationRead getSpecificationForDestinationId(f actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, destination.getWorkspaceId(), destinationIdRequestBody.getDestinationId()); final io.airbyte.protocol.models.ConnectorSpecification spec = destinationVersion.getSpec(); - return getDestinationSpecificationRead(destinationDefinition, spec, destinationVersion.getSupportsRefreshes()); + return getDestinationSpecificationRead(destinationDefinition, spec, destinationVersion.getSupportsRefreshes(), destination.getWorkspaceId()); } /** @@ -136,11 +143,15 @@ public DestinationDefinitionSpecificationRead getDestinationSpecification(final actorDefinitionVersionHelper.getDestinationVersion(destination, destinationDefinitionIdWithWorkspaceId.getWorkspaceId()); final io.airbyte.protocol.models.ConnectorSpecification spec = destinationVersion.getSpec(); - return getDestinationSpecificationRead(destination, spec, destinationVersion.getSupportsRefreshes()); + return getDestinationSpecificationRead(destination, spec, destinationVersion.getSupportsRefreshes(), + destinationDefinitionIdWithWorkspaceId.getWorkspaceId()); } - private SourceDefinitionSpecificationRead getSourceSpecificationRead(final StandardSourceDefinition sourceDefinition, - final io.airbyte.protocol.models.ConnectorSpecification spec) { + @VisibleForTesting + SourceDefinitionSpecificationRead getSourceSpecificationRead(final StandardSourceDefinition sourceDefinition, + final io.airbyte.protocol.models.ConnectorSpecification spec, + final UUID workspaceId) + throws IOException { final SourceDefinitionSpecificationRead specRead = new SourceDefinitionSpecificationRead() .jobInfo(jobConverter.getSynchronousJobRead(SynchronousJobMetadata.mock(JobConfig.ConfigType.GET_SPEC))) .connectionSpecification(spec.getConnectionSpecification()) @@ -152,13 +163,21 @@ private SourceDefinitionSpecificationRead getSourceSpecificationRead(final Stand final Optional advancedAuth = OauthModelConverter.getAdvancedAuth(spec); advancedAuth.ifPresent(specRead::setAdvancedAuth); + if (advancedAuth.isPresent()) { + final Optional sourceOAuthParameter = + oAuthService.getSourceOAuthParameterOptional(workspaceId, sourceDefinition.getSourceDefinitionId()); + specRead.setAdvancedAuthCredentialsAvailable(sourceOAuthParameter.isPresent()); + } return specRead; } - private DestinationDefinitionSpecificationRead getDestinationSpecificationRead(final StandardDestinationDefinition destinationDefinition, - final io.airbyte.protocol.models.ConnectorSpecification spec, - final boolean supportsRefreshes) { + @VisibleForTesting + DestinationDefinitionSpecificationRead getDestinationSpecificationRead(final StandardDestinationDefinition destinationDefinition, + final io.airbyte.protocol.models.ConnectorSpecification spec, + final boolean supportsRefreshes, + final UUID workspaceId) + throws 
IOException { final DestinationDefinitionSpecificationRead specRead = new DestinationDefinitionSpecificationRead() .jobInfo(jobConverter.getSynchronousJobRead(SynchronousJobMetadata.mock(JobConfig.ConfigType.GET_SPEC))) .supportedDestinationSyncModes(getFinalDestinationSyncModes(spec.getSupportedDestinationSyncModes(), supportsRefreshes)) @@ -168,12 +187,17 @@ private DestinationDefinitionSpecificationRead getDestinationSpecificationRead(f final Optional advancedAuth = OauthModelConverter.getAdvancedAuth(spec); advancedAuth.ifPresent(specRead::setAdvancedAuth); + if (advancedAuth.isPresent()) { + final Optional destinationOAuthParameter = + oAuthService.getDestinationOAuthParameterOptional(workspaceId, destinationDefinition.getDestinationDefinitionId()); + specRead.setAdvancedAuthCredentialsAvailable(destinationOAuthParameter.isPresent()); + } return specRead; } private List getFinalDestinationSyncModes(final List syncModes, - boolean supportsRefreshes) { + final boolean supportsRefreshes) { final List finalSyncModes = new ArrayList<>(); boolean hasDedup = false; boolean hasOverwrite = false; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandler.java index 5a6b7d805cd..96d9a7c8955 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/InstanceConfigurationHandler.java @@ -42,7 +42,6 @@ import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,7 +49,6 @@ * InstanceConfigurationHandler. Javadocs suppressed because api docs should be used as source of * truth. */ -@Slf4j @Singleton public class InstanceConfigurationHandler { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java index a5ed59efb0e..4b02cd59ee2 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobHistoryHandler.java @@ -39,8 +39,6 @@ import io.airbyte.api.model.generated.StreamStats; import io.airbyte.api.model.generated.StreamSyncProgressReadItem; import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.logging.LogClientManager; -import io.airbyte.commons.logging.LogUtils; import io.airbyte.commons.server.converters.ApiPojoConverters; import io.airbyte.commons.server.converters.JobConverter; import io.airbyte.commons.server.converters.WorkflowStateConverter; @@ -64,7 +62,6 @@ import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.metrics.lib.MetricTags; import io.airbyte.persistence.job.JobPersistence; -import io.airbyte.persistence.job.WorkspaceHelper; import io.airbyte.validation.json.JsonValidationException; import io.micronaut.core.util.CollectionUtils; import jakarta.inject.Singleton; @@ -80,13 +77,11 @@ import java.util.UUID; import java.util.function.Function; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; /** * JobHistoryHandler. Javadocs suppressed because api docs should be used as source of truth. 
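The specification-handler changes above all reduce to one branch: when a connector spec advertises advanced auth, the handler now also checks whether an OAuth parameter is already stored for that workspace and definition, and reports the result as advancedAuthCredentialsAvailable so clients can tell whether a consent flow can start without the user supplying their own credentials. A condensed, illustrative sketch of the source-side variant (imports omitted, destination side is symmetric), not the verbatim handler code:

    // Illustrative helper; mirrors the source-side branch added above.
    static void setAdvancedAuthAvailability(final SourceDefinitionSpecificationRead specRead,
                                            final io.airbyte.protocol.models.ConnectorSpecification spec,
                                            final OAuthService oAuthService,
                                            final UUID workspaceId,
                                            final UUID sourceDefinitionId)
        throws IOException {
      final Optional<AdvancedAuth> advancedAuth = OauthModelConverter.getAdvancedAuth(spec);
      advancedAuth.ifPresent(specRead::setAdvancedAuth);
      if (advancedAuth.isPresent()) {
        // True when a SourceOAuthParameter already exists for this workspace/definition pair,
        // i.e. OAuth credentials are configured at the instance or workspace level.
        specRead.setAdvancedAuthCredentialsAvailable(
            oAuthService.getSourceOAuthParameterOptional(workspaceId, sourceDefinitionId).isPresent());
      }
    }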
*/ @Singleton -@Slf4j @SuppressWarnings("PMD.PreserveStackTrace") public class JobHistoryHandler { @@ -114,14 +109,12 @@ public JobHistoryHandler(final JobPersistence jobPersistence, final AirbyteVersion airbyteVersion, final TemporalClient temporalClient, final FeatureFlagClient featureFlagClient, - final LogClientManager logClientManager, + final JobConverter jobConverter, final JobService jobService, - final ApiPojoConverters apiPojoConverters, - final LogUtils logUtils, - final WorkspaceHelper workspaceHelper) { + final ApiPojoConverters apiPojoConverters) { this.featureFlagClient = featureFlagClient; + this.jobConverter = jobConverter; this.jobService = jobService; - jobConverter = new JobConverter(featureFlagClient, logClientManager, logUtils, workspaceHelper); workflowStateConverter = new WorkflowStateConverter(); this.jobPersistence = jobPersistence; this.connectionService = connectionService; @@ -355,6 +348,7 @@ public JobDebugInfoRead getJobDebugInfo(final JobIdRequestBody jobIdRequestBody) return jobDebugInfoRead; } + @Trace public Optional getLatestRunningSyncJob(final UUID connectionId) throws IOException { final List nonTerminalSyncJobsForConnection = jobPersistence.listJobsForConnectionWithStatuses( connectionId, @@ -437,14 +431,17 @@ public ConnectionSyncProgressRead getConnectionSyncProgress(final ConnectionIdRe .streams(finalStreamsWithStats); } + @Trace public Optional getLatestSyncJob(final UUID connectionId) throws IOException { return jobPersistence.getLastSyncJob(connectionId).map(JobConverter::getJobRead); } + @Trace public List getLatestSyncJobsForConnections(final List connectionIds) throws IOException { return jobPersistence.getLastSyncJobForConnections(connectionIds); } + @Trace public List getRunningSyncJobForConnections(final List connectionIds) throws IOException { return jobPersistence.getRunningSyncJobForConnections(connectionIds).stream() .map(JobConverter::getJobRead) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java index c5d9d7dde57..80adac9a8a5 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/JobInputHandler.java @@ -29,6 +29,7 @@ import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ActorType; import io.airbyte.config.AttemptSyncConfig; +import io.airbyte.config.ConfigScopeType; import io.airbyte.config.ConnectionContext; import io.airbyte.config.DestinationConnection; import io.airbyte.config.Job; @@ -39,6 +40,7 @@ import io.airbyte.config.RefreshConfig; import io.airbyte.config.ResetSourceConfiguration; import io.airbyte.config.ResourceRequirements; +import io.airbyte.config.ScopedConfiguration; import io.airbyte.config.SourceConnection; import io.airbyte.config.StandardCheckConnectionInput; import io.airbyte.config.StandardDestinationDefinition; @@ -52,7 +54,9 @@ import io.airbyte.config.persistence.ConfigInjector; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SourceService; +import io.airbyte.data.services.shared.NetworkSecurityTokenKey; import io.airbyte.featureflag.Connection; import io.airbyte.featureflag.Context; import io.airbyte.featureflag.FeatureFlagClient; @@ -76,6 +80,7 @@ import 
java.util.Map; import java.util.Optional; import java.util.UUID; +import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -97,6 +102,7 @@ public class JobInputHandler { private final SourceService sourceService; private final DestinationService destinationService; private final ApiPojoConverters apiPojoConverters; + private final ScopedConfigurationService scopedConfigurationService; private static final Logger LOGGER = LoggerFactory.getLogger(JobInputHandler.class); @@ -112,7 +118,8 @@ public JobInputHandler(final JobPersistence jobPersistence, final ConnectionService connectionService, final SourceService sourceService, final DestinationService destinationService, - final ApiPojoConverters apiPojoConverters) { + final ApiPojoConverters apiPojoConverters, + final ScopedConfigurationService scopedConfigurationService) { this.jobPersistence = jobPersistence; this.featureFlagClient = featureFlagClient; this.oAuthConfigSupplier = oAuthConfigSupplier; @@ -125,6 +132,7 @@ public JobInputHandler(final JobPersistence jobPersistence, this.sourceService = sourceService; this.destinationService = destinationService; this.apiPojoConverters = apiPojoConverters; + this.scopedConfigurationService = scopedConfigurationService; } /** @@ -227,7 +235,8 @@ public Object getJobInput(final SyncInput input) { .withIsReset(JobConfig.ConfigType.RESET_CONNECTION.equals(jobConfigType)) .withConnectionContext(connectionContext) .withUseAsyncReplicate(useAsyncReplicate) - .withUseAsyncActivities(useAsyncActivities); + .withUseAsyncActivities(useAsyncActivities) + .withNetworkSecurityTokens(getNetworkSecurityTokens(config.getWorkspaceId())); saveAttemptSyncConfig(jobId, attempt, connectionId, attemptSyncConfig); return new JobInput(jobRunConfig, sourceLauncherConfig, destinationLauncherConfig, syncInput); @@ -294,7 +303,8 @@ public Object getCheckJobInput(final CheckInput input) { .withActorId(source.getSourceId()) .withConnectionConfiguration(sourceConfiguration) .withResourceRequirements(sourceCheckResourceRequirements) - .withActorContext(sourceContext); + .withActorContext(sourceContext) + .withNetworkSecurityTokens(getNetworkSecurityTokens(jobSyncConfig.getWorkspaceId())); final ResourceRequirements destinationCheckResourceRequirements = getResourceRequirementsForJobType(destinationDefinition.getResourceRequirements(), JobType.CHECK_CONNECTION); @@ -306,7 +316,8 @@ public Object getCheckJobInput(final CheckInput input) { .withActorId(destination.getDestinationId()) .withConnectionConfiguration(destinationConfiguration) .withResourceRequirements(destinationCheckResourceRequirements) - .withActorContext(destinationContext); + .withActorContext(destinationContext) + .withNetworkSecurityTokens(getNetworkSecurityTokens(jobSyncConfig.getWorkspaceId())); return new SyncJobCheckConnectionInputs( sourceLauncherConfig, destinationLauncherConfig, @@ -452,4 +463,17 @@ private JsonNode getDestinationConfiguration(final DestinationConnection destina destination.getConfiguration()), destination.getDestinationDefinitionId()); } + private @NotNull List getNetworkSecurityTokens(final UUID workspaceId) { + final Map scopes = Map.of(ConfigScopeType.WORKSPACE, workspaceId); + try { + final List podLabelConfigurations = + scopedConfigurationService.getScopedConfigurations(NetworkSecurityTokenKey.INSTANCE, scopes); + return podLabelConfigurations.stream().map(ScopedConfiguration::getValue).toList(); + + } catch (IllegalArgumentException e) { + LOGGER.error(e.getMessage()); + return 
Collections.emptyList(); + } + } + } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/NotificationsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/NotificationsHandler.java index dfdebeed528..a9ac5fa003d 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/NotificationsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/NotificationsHandler.java @@ -37,35 +37,38 @@ public class NotificationsHandler { public static final String AIRBYTE_URL = "https://airbyte.com/"; - public static final SyncSummary TEST_SUCCESS_SUMMARY = SyncSummary.builder() - .workspace(WorkspaceInfo.builder().id(UUID.randomUUID()).name("Workspace").url(AIRBYTE_URL).build()) - .connection(ConnectionInfo.builder().id(UUID.randomUUID()).name("Connection").url(AIRBYTE_URL).build()) - .destination(DestinationInfo.builder().id(UUID.randomUUID()).name("Destination").url(AIRBYTE_URL).build()) - .source(SourceInfo.builder().id(UUID.randomUUID()).name("Source").url(AIRBYTE_URL).build()) - .startedAt(Instant.now().minusSeconds(3600)) - .finishedAt(Instant.now()) - .isSuccess(true) - .jobId(10L) - .recordsEmitted(10000) - .recordsCommitted(1000) - .bytesEmitted(159341141) - .bytesCommitted(159341141) - .build(); - public static final SyncSummary TEST_FAILURE_SUMMARY = SyncSummary.builder() - .workspace(WorkspaceInfo.builder().id(UUID.randomUUID()).name("Main Workspace").url(AIRBYTE_URL).build()) - .connection(ConnectionInfo.builder().id(UUID.randomUUID()).name("Test Connection").url(AIRBYTE_URL).build()) - .destination(DestinationInfo.builder().id(UUID.randomUUID()).name("The Destination").url(AIRBYTE_URL).build()) - .source(SourceInfo.builder().id(UUID.randomUUID()).name("The Source").url(AIRBYTE_URL).build()) - .startedAt(Instant.now().minusSeconds(3600)) - .finishedAt(Instant.now()) - .errorMessage("This is test notification. Everything is fine! This is where the error message will show up when an actual sync fails.") - .isSuccess(false) - .jobId(10L) - .recordsEmitted(10000) - .recordsCommitted(10) - .bytesEmitted(159341141) - .bytesCommitted(1893412) - .build(); + public static final SyncSummary TEST_SUCCESS_SUMMARY = new SyncSummary( + new WorkspaceInfo(UUID.randomUUID(), "Workspace", AIRBYTE_URL), + new ConnectionInfo(UUID.randomUUID(), "Connection", AIRBYTE_URL), + new SourceInfo(UUID.randomUUID(), "Source", AIRBYTE_URL), + new DestinationInfo(UUID.randomUUID(), "Destination", AIRBYTE_URL), + 10L, + true, + Instant.now().minusSeconds(3600), + Instant.now(), + 159341141, + 159341141, + 10000, + 1000, + 0, + 0, + null); + public static final SyncSummary TEST_FAILURE_SUMMARY = new SyncSummary( + new WorkspaceInfo(UUID.randomUUID(), "Main Workspace", AIRBYTE_URL), + new ConnectionInfo(UUID.randomUUID(), "Test Connection", AIRBYTE_URL), + new SourceInfo(UUID.randomUUID(), "The Source", AIRBYTE_URL), + new DestinationInfo(UUID.randomUUID(), "The Destination", AIRBYTE_URL), + 10L, + false, + Instant.now().minusSeconds(3600), + Instant.now(), + 159341141, + 1893412, + 10000, + 10, + 0, + 0, + "This is test notification. Everything is fine! 
This is where the error message will show up when an actual sync fails."); public static final CatalogDiff TEST_DIFF = new CatalogDiff() .addTransformsItem(new StreamTransform().transformType(StreamTransform.TransformTypeEnum.ADD_STREAM) .streamDescriptor(new StreamDescriptor().name("some_new_stream").namespace("ns"))) @@ -77,13 +80,12 @@ public class NotificationsHandler { new FieldTransform().fieldName(List.of("path", "field")).transformType(FieldTransform.TransformTypeEnum.REMOVE_FIELD) .breaking(false), new FieldTransform().fieldName(List.of("new_field")).transformType(FieldTransform.TransformTypeEnum.ADD_FIELD).breaking(false))))); - public static final SchemaUpdateNotification TEST_SCHEMA_UPDATE = SchemaUpdateNotification.builder() - .workspace(WorkspaceInfo.builder().id(UUID.randomUUID()).name("Test notification workspace").url(AIRBYTE_URL).build()) - .sourceInfo(SourceInfo.builder().id(UUID.randomUUID()).name("Some Source").url(AIRBYTE_URL).build()) - .connectionInfo(ConnectionInfo.builder().id(UUID.randomUUID()).name("Some Connection").url(AIRBYTE_URL).build()) - .isBreakingChange(false) - .catalogDiff(TEST_DIFF) - .build(); + public static final SchemaUpdateNotification TEST_SCHEMA_UPDATE = new SchemaUpdateNotification( + new WorkspaceInfo(UUID.randomUUID(), "Test notification workspace", AIRBYTE_URL), + new ConnectionInfo(UUID.randomUUID(), "Some Connection", AIRBYTE_URL), + new SourceInfo(UUID.randomUUID(), "Some Source", AIRBYTE_URL), + false, + TEST_DIFF); private static final Map NOTIFICATION_TRIGGER_TEST_MESSAGE = Map.of( NotificationTrigger.SYNC_SUCCESS, "Hello World! This is a test from Airbyte to try slack notification settings for sync successes.", NotificationTrigger.SYNC_FAILURE, "Hello World! This is a test from Airbyte to try slack notification settings for sync failures.", diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java index 1c4e6921950..dc286d9f04b 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java @@ -29,7 +29,7 @@ import io.airbyte.commons.json.JsonPaths; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.server.errors.BadObjectSchemaKnownException; -import io.airbyte.commons.server.handlers.helpers.OAuthPathExtractor; +import io.airbyte.commons.server.handlers.helpers.OAuthHelper; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ConfigSchema; import io.airbyte.config.DestinationConnection; @@ -70,7 +70,6 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; -import java.net.http.HttpClient; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -101,7 +100,7 @@ public class OAuthHandler { private final SecretPersistenceConfigService secretPersistenceConfigService; private final WorkspaceService workspaceService; - public OAuthHandler(@Named("oauthHttpClient") final HttpClient httpClient, + public OAuthHandler(@Named("oauthImplementationFactory") final OAuthImplementationFactory oauthImplementationFactory, final TrackingClient trackingClient, final SecretsRepositoryWriter secretsRepositoryWriter, final ActorDefinitionVersionHelper actorDefinitionVersionHelper, @@ -111,7 +110,7 @@ public OAuthHandler(@Named("oauthHttpClient") final HttpClient httpClient, final OAuthService 
oauthService, final SecretPersistenceConfigService secretPersistenceConfigService, final WorkspaceService workspaceService) { - this.oAuthImplementationFactory = new OAuthImplementationFactory(httpClient); + this.oAuthImplementationFactory = oauthImplementationFactory; this.trackingClient = trackingClient; this.secretsRepositoryWriter = secretsRepositoryWriter; this.actorDefinitionVersionHelper = actorDefinitionVersionHelper; @@ -325,7 +324,7 @@ public CompleteOAuthResponse completeSourceOAuth(final CompleteSourceOauthReques } catch (final Exception e) { LOGGER.error(ERROR_MESSAGE, e); } - return mapToCompleteOAuthResponse(result); + return OAuthHelper.mapToCompleteOAuthResponse(result); } @SuppressWarnings("PMD.PreserveStackTrace") @@ -387,7 +386,7 @@ public CompleteOAuthResponse completeDestinationOAuth(final CompleteDestinationO } catch (final Exception e) { LOGGER.error(ERROR_MESSAGE, e); } - return mapToCompleteOAuthResponse(result); + return OAuthHelper.mapToCompleteOAuthResponse(result); } @SuppressWarnings("PMD.PreserveStackTrace") @@ -443,7 +442,7 @@ private JsonNode getOAuthInputConfigurationForConsent(final ConnectorSpecificati final JsonNode hydratedSourceConnectionConfiguration, final JsonNode oAuthInputConfiguration) { final Map fieldsToGet = - buildJsonPathFromOAuthFlowInitParameters(OAuthPathExtractor.extractOauthConfigurationPaths( + buildJsonPathFromOAuthFlowInitParameters(OAuthHelper.extractOauthConfigurationPaths( spec.getAdvancedAuth().getOauthConfigSpecification().getOauthUserInputFromConnectorConfigSpecification())); final JsonNode oAuthInputConfigurationFromDB = getOAuthInputConfiguration(hydratedSourceConnectionConfiguration, fieldsToGet); @@ -451,29 +450,6 @@ private JsonNode getOAuthInputConfigurationForConsent(final ConnectorSpecificati return getOauthFromDBIfNeeded(oAuthInputConfigurationFromDB, oAuthInputConfiguration); } - CompleteOAuthResponse mapToCompleteOAuthResponse(final Map input) { - final CompleteOAuthResponse response = new CompleteOAuthResponse(); - response.setAuthPayload(new HashMap<>()); - - if (input.containsKey("request_succeeded")) { - response.setRequestSucceeded("true".equals(input.get("request_succeeded"))); - } else { - response.setRequestSucceeded(true); - } - - if (input.containsKey("request_error")) { - response.setRequestError(input.get("request_error").toString()); - } - - input.forEach((k, v) -> { - if (!"request_succeeded".equals(k) && !"request_error".equals(k)) { - response.getAuthPayload().put(k, v); - } - }); - - return response; - } - @VisibleForTesting Map buildJsonPathFromOAuthFlowInitParameters(final Map> oAuthFlowInitParameters) { return oAuthFlowInitParameters.entrySet().stream() @@ -558,7 +534,7 @@ public CompleteOAuthResponse writeOAuthResponseSecret(final UUID workspaceId, fi generateOAuthSecretCoordinate(workspaceId), payloadString, null); } - return mapToCompleteOAuthResponse(Map.of("secretId", secretCoordinate.getFullCoordinate())); + return OAuthHelper.mapToCompleteOAuthResponse(Map.of("secretId", secretCoordinate.getFullCoordinate())); } catch (final JsonProcessingException e) { throw new RuntimeException("Json object could not be written to string.", e); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java index b7a65f62040..0fd667f3e3d 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java +++ 
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OrganizationsHandler.java @@ -58,8 +58,6 @@ private static OrganizationRead buildOrganizationRead(final Organization organiz .organizationId(organization.getOrganizationId()) .organizationName(organization.getName()) .email(organization.getEmail()) - .pba(organization.getPba()) - .orgLevelBilling(organization.getOrgLevelBilling()) .ssoRealm(organization.getSsoRealm()); } @@ -69,15 +67,11 @@ public OrganizationRead createOrganization(final OrganizationCreateRequestBody o final String email = organizationCreateRequestBody.getEmail(); final UUID userId = organizationCreateRequestBody.getUserId(); final UUID orgId = uuidGenerator.get(); - final Boolean pba = organizationCreateRequestBody.getPba() != null && organizationCreateRequestBody.getPba(); - final Boolean orgLevelBilling = organizationCreateRequestBody.getOrgLevelBilling() != null && organizationCreateRequestBody.getOrgLevelBilling(); final Organization organization = new Organization() .withOrganizationId(orgId) .withName(organizationName) .withEmail(email) - .withUserId(userId) - .withPba(pba) - .withOrgLevelBilling(orgLevelBilling); + .withUserId(userId); organizationPersistence.createOrganization(organization); try { @@ -103,15 +97,6 @@ public OrganizationRead updateOrganization(final OrganizationUpdateRequestBody o organization.setName(organizationUpdateRequestBody.getOrganizationName()); hasChanged = true; } - if (organizationUpdateRequestBody.getPba() != null && !organization.getPba().equals(organizationUpdateRequestBody.getPba())) { - organization.setPba(organizationUpdateRequestBody.getPba()); - hasChanged = true; - } - if (organizationUpdateRequestBody.getOrgLevelBilling() != null && !organization.getOrgLevelBilling() - .equals(organizationUpdateRequestBody.getOrgLevelBilling())) { - organization.setOrgLevelBilling(organizationUpdateRequestBody.getOrgLevelBilling()); - hasChanged = true; - } if (organizationUpdateRequestBody.getEmail() != null && !organizationUpdateRequestBody.getEmail() .equals(organization.getEmail())) { organization.setEmail(organizationUpdateRequestBody.getEmail()); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java index d171f0dd8d5..eeba06bc3ef 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java @@ -82,11 +82,9 @@ import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; -import io.airbyte.featureflag.DiscoverPostprocessInTemporal; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.Organization; import io.airbyte.featureflag.UseRuntimeSecretPersistence; -import io.airbyte.featureflag.Workspace; import io.airbyte.metrics.lib.MetricAttribute; import io.airbyte.metrics.lib.MetricClientFactory; import io.airbyte.metrics.lib.MetricTags; @@ -105,12 +103,12 @@ import jakarta.inject.Singleton; import jakarta.validation.constraints.NotNull; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -119,10 +117,10 @@ */ @SuppressWarnings("ParameterName") @Singleton -@Slf4j public class SchedulerHandler { - private static final Logger LOGGER = LoggerFactory.getLogger(SchedulerHandler.class); + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final HashFunction HASH_FUNCTION = Hashing.md5(); private static final Set VALUE_CONFLICT_EXCEPTION_ERROR_CODE_SET = @@ -359,11 +357,7 @@ public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceId(final Source throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.config.persistence.ConfigNotFoundException { final SourceConnection source = sourceService.getSourceConnection(req.getSourceId()); - if (featureFlagClient.boolVariation(DiscoverPostprocessInTemporal.INSTANCE, new Workspace(source.getWorkspaceId()))) { - return discover(req, source); - } else { - return discoverAndGloballyDisable(req, source); - } + return discover(req, source); } /** @@ -499,10 +493,10 @@ private SourceDiscoverSchemaRead runDiscoverJobDiffAndConditionallyDisable(final public void applySchemaChangeForSource(final SourceAutoPropagateChange sourceAutoPropagateChange) throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.config.persistence.ConfigNotFoundException { - LOGGER.info("Applying schema changes for source '{}' in workspace '{}'", + log.info("Applying schema changes for source '{}' in workspace '{}'", sourceAutoPropagateChange.getSourceId(), sourceAutoPropagateChange.getWorkspaceId()); if (sourceAutoPropagateChange.getSourceId() == null) { - LOGGER.warn("Missing required field sourceId for applying schema change."); + log.warn("Missing required field sourceId for applying schema change."); return; } @@ -511,7 +505,7 @@ public void applySchemaChangeForSource(final SourceAutoPropagateChange sourceAut || sourceAutoPropagateChange.getCatalog() == null) { MetricClientFactory.getMetricClient().count(OssMetricsRegistry.MISSING_APPLY_SCHEMA_CHANGE_INPUT, 1, new MetricAttribute(MetricTags.SOURCE_ID, sourceAutoPropagateChange.getSourceId().toString())); - LOGGER.warn("Missing required fields for applying schema change. sourceId: {}, workspaceId: {}, catalogId: {}, catalog: {}", + log.warn("Missing required fields for applying schema change. 
sourceId: {}, workspaceId: {}, catalogId: {}, catalog: {}", sourceAutoPropagateChange.getSourceId(), sourceAutoPropagateChange.getWorkspaceId(), sourceAutoPropagateChange.getCatalogId(), sourceAutoPropagateChange.getCatalog()); return; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandler.java index 372108e5e0c..3a166257fc4 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendCheckUpdatesHandler.java @@ -11,13 +11,15 @@ import io.airbyte.config.specs.RemoteDefinitionsProvider; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.UUID; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * The web backend is an abstraction that allows the frontend to structure data in such a way that @@ -26,10 +28,11 @@ * * Javadocs suppressed because api docs should be used as source of truth. */ -@Slf4j @Singleton public class WebBackendCheckUpdatesHandler { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final int NO_CHANGES_FOUND = 0; final SourceDefinitionsHandler sourceDefinitionsHandler; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java index 6ab8f63d7d5..864a764609c 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java @@ -73,6 +73,7 @@ import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.data.services.shared.StandardSyncQuery; +import io.airbyte.mappers.transformations.DestinationCatalogGenerator; import io.airbyte.metrics.lib.ApmTraceUtils; import io.airbyte.metrics.lib.MetricTags; import io.airbyte.validation.json.JsonValidationException; @@ -110,6 +111,7 @@ public class WebBackendConnectionsHandler { private final EventRunner eventRunner; // todo (cgardens) - this handler should NOT have access to the db. only access via handler. 
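SchedulerHandler and WebBackendCheckUpdatesHandler (along with several helpers later in this change) drop Lombok's @Slf4j in favour of an explicit SLF4J logger resolved through MethodHandles. A minimal, self-contained sketch of that declaration, with a hypothetical class name:

    import java.lang.invoke.MethodHandles;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical example class: the logger line is the pattern now used in the handlers above.
    // MethodHandles.lookup().lookupClass() resolves the enclosing class at runtime, so the
    // declaration can be copied between classes without editing a class literal.
    public class ExampleHandler {

      private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

      public void applyChange(final String sourceId) {
        log.info("Applying schema changes for source '{}'", sourceId);
      }

    }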
private final ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private final DestinationCatalogGenerator destinationCatalogGenerator; private final FieldGenerator fieldGenerator; private final CatalogService catalogService; @@ -139,7 +141,8 @@ public WebBackendConnectionsHandler(final ActorDefinitionVersionHandler actorDef final WorkspaceService workspaceService, final CatalogConverter catalogConverter, final ApplySchemaChangeHelper applySchemaChangeHelper, - final ApiPojoConverters apiPojoConverters) { + final ApiPojoConverters apiPojoConverters, + final DestinationCatalogGenerator destinationCatalogGenerator) { this.actorDefinitionVersionHandler = actorDefinitionVersionHandler; this.connectionsHandler = connectionsHandler; this.stateHandler = stateHandler; @@ -159,6 +162,7 @@ public WebBackendConnectionsHandler(final ActorDefinitionVersionHandler actorDef this.catalogConverter = catalogConverter; this.applySchemaChangeHelper = applySchemaChangeHelper; this.apiPojoConverters = apiPojoConverters; + this.destinationCatalogGenerator = destinationCatalogGenerator; } public WebBackendWorkspaceStateResult getWorkspaceState(final WebBackendWorkspaceState webBackendWorkspaceState) throws IOException { @@ -353,18 +357,21 @@ static SchemaChange getSchemaChange( return SchemaChange.NO_CHANGE; } + @Trace private SourceRead getSourceRead(final UUID sourceId) throws JsonValidationException, IOException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceIdRequestBody sourceIdRequestBody = new SourceIdRequestBody().sourceId(sourceId); return sourceHandler.getSource(sourceIdRequestBody); } + @Trace private DestinationRead getDestinationRead(final UUID destinationId) throws JsonValidationException, IOException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final DestinationIdRequestBody destinationIdRequestBody = new DestinationIdRequestBody().destinationId(destinationId); return destinationHandler.getDestination(destinationIdRequestBody); } + @Trace private OperationReadList getOperationReadList(final ConnectionRead connectionRead) throws JsonValidationException, IOException, ConfigNotFoundException { final ConnectionIdRequestBody connectionIdRequestBody = new ConnectionIdRequestBody().connectionId(connectionRead.getConnectionId()); @@ -703,36 +710,46 @@ private void resetStreamsIfNeeded(final WebBackendConnectionUpdate webBackendCon throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { final UUID connectionId = webBackendConnectionPatch.getConnectionId(); - final Boolean skipReset = webBackendConnectionPatch.getSkipReset() != null ? 
webBackendConnectionPatch.getSkipReset() : false; - if (!skipReset) { - final AirbyteCatalog apiExistingCatalog = catalogConverter.toApi(oldConfiguredCatalog, - catalogConverter.getFieldSelectionData(oldConnectionRead.getSyncCatalog())); - final AirbyteCatalog upToDateAirbyteCatalog = updatedConnectionRead.getSyncCatalog(); - final CatalogDiff catalogDiff = - connectionsHandler.getDiff(apiExistingCatalog, upToDateAirbyteCatalog, - catalogConverter.toConfiguredInternal(upToDateAirbyteCatalog), connectionId); - final List apiStreamsToReset = getStreamsToReset(catalogDiff); - final Set changedConfigStreamDescriptors = - connectionsHandler.getConfigurationDiff(apiExistingCatalog, upToDateAirbyteCatalog); - final Set allStreamToReset = new HashSet<>(); - allStreamToReset.addAll(apiStreamsToReset); - allStreamToReset.addAll(changedConfigStreamDescriptors); - final List streamsToReset = - allStreamToReset.stream().map(ApiConverters::toInternal).toList(); - - if (!streamsToReset.isEmpty()) { - final var destinationVersion = actorDefinitionVersionHandler - .getActorDefinitionVersionForDestinationId(new DestinationIdRequestBody().destinationId(oldConnectionRead.getDestinationId())); - if (destinationVersion.getSupportsRefreshes()) { - eventRunner.refreshConnectionAsync( - connectionId, - streamsToReset, - RefreshType.MERGE); - } else { - eventRunner.resetConnectionAsync( - connectionId, - streamsToReset); - } + final boolean skipReset = webBackendConnectionPatch.getSkipReset() != null ? webBackendConnectionPatch.getSkipReset() : false; + if (skipReset) { + return; + } + + // Use destination catalogs when computing diffs so mappings are taken into account + final ConfiguredAirbyteCatalog updatedConfiguredCatalog = catalogConverter.toConfiguredInternal(updatedConnectionRead.getSyncCatalog()); + final ConfiguredAirbyteCatalog updatedDestinationCatalog = destinationCatalogGenerator + .generateDestinationCatalog(updatedConfiguredCatalog).getCatalog(); + final ConfiguredAirbyteCatalog oldDestinationCatalog = destinationCatalogGenerator + .generateDestinationCatalog(oldConfiguredCatalog).getCatalog(); + + final AirbyteCatalog apiExistingCatalog = catalogConverter.toApi(oldDestinationCatalog, + catalogConverter.getFieldSelectionData(oldConnectionRead.getSyncCatalog())); + final AirbyteCatalog apiUpdatedCatalog = catalogConverter.toApi(updatedDestinationCatalog, + catalogConverter.getFieldSelectionData(updatedConnectionRead.getSyncCatalog())); + + final CatalogDiff catalogDiff = + connectionsHandler.getDiff(apiExistingCatalog, apiUpdatedCatalog, updatedDestinationCatalog, connectionId); + final List apiStreamsToReset = getStreamsToReset(catalogDiff); + final Set changedConfigStreamDescriptors = + connectionsHandler.getConfigurationDiff(apiExistingCatalog, apiUpdatedCatalog); + final Set allStreamToReset = new HashSet<>(); + allStreamToReset.addAll(apiStreamsToReset); + allStreamToReset.addAll(changedConfigStreamDescriptors); + final List streamsToReset = + allStreamToReset.stream().map(ApiConverters::toInternal).toList(); + + if (!streamsToReset.isEmpty()) { + final var destinationVersion = actorDefinitionVersionHandler + .getActorDefinitionVersionForDestinationId(new DestinationIdRequestBody().destinationId(oldConnectionRead.getDestinationId())); + if (destinationVersion.getSupportsRefreshes()) { + eventRunner.refreshConnectionAsync( + connectionId, + streamsToReset, + RefreshType.MERGE); + } else { + eventRunner.resetConnectionAsync( + connectionId, + streamsToReset); } } } diff --git 
a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java index 07dc0b4753a..8e20393c65d 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WorkspacesHandler.java @@ -485,7 +485,6 @@ private WorkspaceOrganizationInfoRead buildWorkspaceOrganizationInfoRead(final O return new WorkspaceOrganizationInfoRead() .organizationId(organization.getOrganizationId()) .organizationName(organization.getName()) - .pba(organization.getPba()) .sso(organization.getSsoRealm() != null && !organization.getSsoRealm().isEmpty()); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ActorDefinitionHandlerHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ActorDefinitionHandlerHelper.java index d28732bc0cc..5300f54be17 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ActorDefinitionHandlerHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ActorDefinitionHandlerHelper.java @@ -35,14 +35,12 @@ import java.util.Objects; import java.util.Optional; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; /** * A helper class for server code that is the shared for actor definitions (source definitions and * destination definitions). */ @Singleton -@Slf4j public class ActorDefinitionHandlerHelper { private final SynchronousSchedulerClient synchronousSchedulerClient; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ApplySchemaChangeHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ApplySchemaChangeHelper.java index b55f92d149f..efaa6266468 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ApplySchemaChangeHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ApplySchemaChangeHelper.java @@ -26,12 +26,10 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; /** * Helper that allows to generate the catalogs to be auto propagated. 
*/ -@Slf4j @Singleton public class ApplySchemaChangeHelper { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java index 55f07efddcf..4c9ac238598 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/CatalogConverter.java @@ -75,6 +75,7 @@ private io.airbyte.api.model.generated.AirbyteStream toApi(final io.airbyte.prot public ConfiguredStreamMapper toApi(final MapperConfig mapper) { return new ConfiguredStreamMapper() + .id(mapper.id()) .type(Enums.toEnum(mapper.name(), StreamMapperType.class) .orElseThrow(() -> new IllegalArgumentException("Unexpected mapper name: " + mapper.name()))) .mapperConfiguration(Jsons.jsonNode(mapper.config())); @@ -233,7 +234,7 @@ private List toConfiguredHashingMappers(@Nullable final ).toList(); } - private List toConfiguredMappers(final @Nullable List mapperConfigs) { + public List toConfiguredMappers(final @Nullable List mapperConfigs) { if (mapperConfigs == null) { return Collections.emptyList(); } @@ -242,7 +243,7 @@ private List toConfiguredMappers(final @Nullable List { final String mapperName = mapperConfig.getType().toString(); final Mapper mapper = mappers.get(mapperName); - return mapper.spec().deserialize(new ConfiguredMapper(mapperName, mapperConfig.getMapperConfiguration())); + return mapper.spec().deserialize(new ConfiguredMapper(mapperName, mapperConfig.getMapperConfiguration(), mapperConfig.getId())); }) .collect(Collectors.toList()); } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionScheduleHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionScheduleHelper.java index a8c11cf5d26..f3016581ef4 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionScheduleHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionScheduleHelper.java @@ -13,17 +13,28 @@ import io.airbyte.api.problems.throwable.generated.CronValidationInvalidTimezoneProblem; import io.airbyte.api.problems.throwable.generated.CronValidationMissingComponentProblem; import io.airbyte.api.problems.throwable.generated.CronValidationMissingCronProblem; +import io.airbyte.api.problems.throwable.generated.CronValidationUnderOneHourNotAllowedProblem; import io.airbyte.commons.server.converters.ApiPojoConverters; +import io.airbyte.commons.server.helpers.CronExpressionHelper; import io.airbyte.config.BasicSchedule; import io.airbyte.config.Cron; import io.airbyte.config.Schedule; import io.airbyte.config.ScheduleData; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.ScheduleType; +import io.airbyte.data.exceptions.ConfigNotFoundException; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.Multi; +import io.airbyte.featureflag.Organization; +import io.airbyte.featureflag.SubOneHourSyncSchedules; +import io.airbyte.featureflag.Workspace; +import io.airbyte.persistence.job.WorkspaceHelper; import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Singleton; import java.text.ParseException; +import java.util.List; import java.util.TimeZone; +import java.util.UUID; import org.joda.time.DateTimeZone; import 
org.quartz.CronExpression; @@ -36,9 +47,18 @@ public class ConnectionScheduleHelper { private final ApiPojoConverters apiPojoConverters; + private final CronExpressionHelper cronExpressionHelper; + private final FeatureFlagClient featureFlagClient; + private final WorkspaceHelper workspaceHelper; - public ConnectionScheduleHelper(final ApiPojoConverters apiPojoConverters) { + public ConnectionScheduleHelper(final ApiPojoConverters apiPojoConverters, + final CronExpressionHelper cronExpressionHelper, + final FeatureFlagClient featureFlagClient, + final WorkspaceHelper workspaceHelper) { this.apiPojoConverters = apiPojoConverters; + this.cronExpressionHelper = cronExpressionHelper; + this.featureFlagClient = featureFlagClient; + this.workspaceHelper = workspaceHelper; } /** @@ -52,7 +72,8 @@ public ConnectionScheduleHelper(final ApiPojoConverters apiPojoConverters) { public void populateSyncFromScheduleTypeAndData(final StandardSync standardSync, final ConnectionScheduleType scheduleType, final ConnectionScheduleData scheduleData) - throws JsonValidationException { + throws JsonValidationException, ConfigNotFoundException { + if (scheduleType != ConnectionScheduleType.MANUAL && scheduleData == null) { throw new JsonValidationException("schedule data must be populated if schedule type is populated"); } @@ -88,7 +109,6 @@ public void populateSyncFromScheduleTypeAndData(final StandardSync standardSync, if (scheduleData.getCron() == null) { throw new CronValidationMissingCronProblem(); } - // Validate that this is a valid cron expression and timezone. final String cronExpression = scheduleData.getCron().getCronExpression(); final String cronTimeZone = scheduleData.getCron().getCronTimeZone(); if (cronExpression == null || cronTimeZone == null) { @@ -97,23 +117,15 @@ public void populateSyncFromScheduleTypeAndData(final StandardSync standardSync, .cronExpression(cronExpression) .cronTimezone(cronTimeZone)); } - if (cronTimeZone.toLowerCase().startsWith("etc")) { - throw new CronValidationInvalidTimezoneProblem(new ProblemCronTimezoneData() - .connectionId(connectionId) - .cronTimezone(cronTimeZone)); - } - try { - final TimeZone timeZone = DateTimeZone.forID(cronTimeZone).toTimeZone(); - final CronExpression parsedCronExpression = new CronExpression(cronExpression); - parsedCronExpression.setTimeZone(timeZone); - } catch (final ParseException e) { - throw new CronValidationInvalidExpressionProblem(new ProblemCronExpressionData() - .cronExpression(cronExpression)); - } catch (final IllegalArgumentException e) { - throw new CronValidationInvalidTimezoneProblem(new ProblemCronTimezoneData() - .connectionId(connectionId) - .cronTimezone(cronTimeZone)); - } + + final UUID workspaceId = workspaceHelper.getWorkspaceForSourceId(standardSync.getSourceId()); + final UUID organizationId = workspaceHelper.getOrganizationForWorkspace(workspaceId); + final boolean canSyncUnderOneHour = featureFlagClient.boolVariation(SubOneHourSyncSchedules.INSTANCE, new Multi( + List.of(new Organization(organizationId), new Workspace(workspaceId)))); + validateCronFrequency(cronExpression, canSyncUnderOneHour); + + validateCronExpressionAndTimezone(cronTimeZone, cronExpression, connectionId); + standardSync .withScheduleType(ScheduleType.CRON) .withScheduleData(new ScheduleData().withCron(new Cron() @@ -130,4 +142,46 @@ public void populateSyncFromScheduleTypeAndData(final StandardSync standardSync, } } + private void validateCronExpressionAndTimezone(final String cronTimeZone, final String cronExpression, final String 
connectionId) { + if (cronTimeZone.toLowerCase().startsWith("etc")) { + throw new CronValidationInvalidTimezoneProblem(new ProblemCronTimezoneData() + .connectionId(connectionId) + .cronTimezone(cronTimeZone)); + } + + try { + final TimeZone timeZone = DateTimeZone.forID(cronTimeZone).toTimeZone(); + final CronExpression parsedCronExpression = new CronExpression(cronExpression); + parsedCronExpression.setTimeZone(timeZone); + } catch (final ParseException e) { + throw new CronValidationInvalidExpressionProblem(new ProblemCronExpressionData() + .cronExpression(cronExpression)); + } catch (final IllegalArgumentException e) { + throw new CronValidationInvalidTimezoneProblem(new ProblemCronTimezoneData() + .connectionId(connectionId) + .cronTimezone(cronTimeZone)); + } + } + + private void validateCronFrequency(final String cronExpression, final Boolean canSyncUnderOneHour) { + final com.cronutils.model.Cron cronUtilsModel; + + try { + cronUtilsModel = cronExpressionHelper.validateCronExpression(cronExpression); + } catch (final IllegalArgumentException e) { + throw new CronValidationInvalidExpressionProblem(new ProblemCronExpressionData() + .cronExpression(cronExpression)); + } + + try { + if (!canSyncUnderOneHour) { + cronExpressionHelper.checkDoesNotExecuteMoreThanOncePerHour(cronUtilsModel); + } + } catch (final IllegalArgumentException e) { + throw new CronValidationUnderOneHourNotAllowedProblem(new ProblemCronExpressionData() + .cronExpression(cronExpression) + .validationErrorMessage(e.getMessage())); + } + } + } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionTimelineEventHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionTimelineEventHelper.java index 687138580d4..1c2c940b0ab 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionTimelineEventHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionTimelineEventHelper.java @@ -41,6 +41,7 @@ import io.airbyte.data.services.shared.ManuallyStartedEvent; import io.airbyte.data.services.shared.SchemaChangeAutoPropagationEvent; import io.airbyte.persistence.job.JobPersistence.AttemptStats; +import jakarta.annotation.Nullable; import jakarta.inject.Inject; import jakarta.inject.Named; import jakarta.inject.Singleton; @@ -297,7 +298,7 @@ private void addPatchIfFieldIsChanged(final Map> pat public void logStatusChangedEventInConnectionTimeline(final UUID connectionId, final ConnectionStatus status, - final String updateReason, + @Nullable final String updateReason, final boolean autoUpdate) { try { if (status != null) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ContextBuilder.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ContextBuilder.java index 89f46f8f69d..7acfaad1aa0 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ContextBuilder.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ContextBuilder.java @@ -18,16 +18,19 @@ import io.airbyte.data.services.WorkspaceService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Intended to be used by the server to build context 
objects so that temporal workflows/activities * have access to relevant IDs. */ -@Slf4j public class ContextBuilder { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final WorkspaceService workspaceService; private final DestinationService destinationService; private final ConnectionService connectionService; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java index 9c958826ba0..b2c71057969 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/JobCreationAndStatusUpdateHelper.java @@ -5,6 +5,7 @@ package io.airbyte.commons.server.handlers.helpers; import static io.airbyte.config.JobConfig.ConfigType.REFRESH; +import static io.airbyte.config.JobConfig.ConfigType.RESET_CONNECTION; import static io.airbyte.config.JobConfig.ConfigType.SYNC; import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.FAILURE_ORIGINS_KEY; import static io.airbyte.metrics.lib.ApmTraceConstants.Tags.FAILURE_TYPES_KEY; @@ -39,6 +40,7 @@ import io.micronaut.core.util.CollectionUtils; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -50,15 +52,17 @@ import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class to handle and track job creation and status updates. 
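The cron scheduling changes to ConnectionScheduleHelper above delegate the frequency check to a CronExpressionHelper that is not part of this diff. As a rough, hedged sketch of what validateCronExpression and checkDoesNotExecuteMoreThanOncePerHour could look like (the class below is an invented stand-in, not the real helper), the cron-utils library already referenced by the handler (com.cronutils.model.Cron) can sample consecutive executions and reject any gap shorter than one hour; Quartz-style expressions are assumed:

import com.cronutils.model.Cron
import com.cronutils.model.CronType
import com.cronutils.model.definition.CronDefinitionBuilder
import com.cronutils.model.time.ExecutionTime
import com.cronutils.parser.CronParser
import java.time.Duration
import java.time.ZonedDateTime

// Hypothetical stand-in for the CronExpressionHelper used above.
class CronExpressionHelperSketch {
  private val parser = CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ))

  // Throws IllegalArgumentException on an invalid expression; the caller maps that to
  // CronValidationInvalidExpressionProblem.
  fun validateCronExpression(expression: String): Cron = parser.parse(expression).validate()

  // Throws IllegalArgumentException if two consecutive executions are ever less than an hour apart;
  // the caller maps that to CronValidationUnderOneHourNotAllowedProblem.
  fun checkDoesNotExecuteMoreThanOncePerHour(cron: Cron) {
    val executionTime = ExecutionTime.forCron(cron)
    var cursor = ZonedDateTime.now()
    repeat(24) {
      val next = executionTime.nextExecution(cursor).orElseThrow()
      val after = executionTime.nextExecution(next).orElseThrow()
      require(Duration.between(next, after) >= Duration.ofHours(1)) {
        "Cron executes more than once per hour: ${cron.asString()}"
      }
      cursor = next
    }
  }
}

Sampling a bounded number of consecutive executions is a heuristic rather than a proof; an expression that fires frequently only in a rare window could slip through, which is acceptable for a validation guardrail.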
*/ -@Slf4j @Singleton public class JobCreationAndStatusUpdateHelper { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String JOB_ID_METADATA_KEY = "jobId"; private static final String ATTEMPT_NUMBER_METADATA_KEY = "attemptNumber"; @@ -183,7 +187,7 @@ private String parseIsJobRunningOnCustomConnectorForMetrics(final Job job) { } private void emitAttemptEvent(final OssMetricsRegistry metric, final Job job, final int attemptNumber) throws IOException { - emitAttemptEvent(metric, job, attemptNumber, Collections.emptyList()); + emitAttemptEvent(metric, job, attemptNumber, imageAttrsFromJob(job)); } private void emitAttemptEvent(final OssMetricsRegistry metric, @@ -230,7 +234,7 @@ private static T getOrNull(final List list, final int index) { } } - private void emitAttemptCompletedEvent(final Job job, final Attempt attempt) throws IOException { + private void emitAttemptCompletedEvent(final Job job, final Attempt attempt) { final Optional failureOrigin = attempt.getFailureSummary().flatMap(summary -> summary.getFailures() .stream() .map(FailureReason::getFailureOrigin) @@ -245,13 +249,36 @@ private void emitAttemptCompletedEvent(final Job job, final Attempt attempt) thr .map(MetricTags::getFailureType) .findFirst()); - final List additionalAttributes = List.of( - new MetricAttribute(MetricTags.ATTEMPT_OUTCOME, attempt.getStatus().toString()), - new MetricAttribute(MetricTags.FAILURE_ORIGIN, failureOrigin.orElse(null)), - new MetricAttribute(MetricTags.FAILURE_TYPE, failureType.orElse(null)), - new MetricAttribute(MetricTags.ATTEMPT_QUEUE, attempt.getProcessingTaskQueue())); + final List additionalAttributes = new ArrayList<>(); + additionalAttributes.add(new MetricAttribute(MetricTags.ATTEMPT_OUTCOME, attempt.getStatus().toString())); + additionalAttributes.add(new MetricAttribute(MetricTags.FAILURE_ORIGIN, failureOrigin.orElse(null))); + additionalAttributes.add(new MetricAttribute(MetricTags.FAILURE_TYPE, failureType.orElse(null))); + additionalAttributes.add(new MetricAttribute(MetricTags.ATTEMPT_QUEUE, attempt.getProcessingTaskQueue())); + additionalAttributes.addAll(imageAttrsFromJob(job)); + + try { + emitAttemptEvent(OssMetricsRegistry.ATTEMPTS_COMPLETED, job, attempt.getAttemptNumber(), additionalAttributes); + } catch (final IOException e) { + log.info("Failed to record attempt completed metric for attempt {} of job {}", attempt.getAttemptNumber(), job.getId()); + } + } + + private List imageAttrsFromJob(final Job job) { + final List attrs = new ArrayList<>(); + if (job.getConfigType() == SYNC) { + final var config = job.getConfig().getSync(); + attrs.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, config.getSourceDockerImage())); + attrs.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, config.getDestinationDockerImage())); + } else if (job.getConfigType() == REFRESH) { + final var config = job.getConfig().getRefresh(); + attrs.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, config.getSourceDockerImage())); + attrs.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, config.getDestinationDockerImage())); + } else if (job.getConfigType() == RESET_CONNECTION) { + final var config = job.getConfig().getResetConnection(); + attrs.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, config.getDestinationDockerImage())); + } - emitAttemptEvent(OssMetricsRegistry.ATTEMPTS_COMPLETED, job, attempt.getAttemptNumber(), additionalAttributes); + return attrs; } @VisibleForTesting @@ -280,31 +307,27 @@ public void 
emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final List additionalAttributes = new ArrayList<>(); if (job.getConfigType() == SYNC) { final var sync = job.getConfig().getSync(); - additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, sync.getSourceDockerImage())); additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE_IS_DEFAULT, String.valueOf(sync.getSourceDockerImageIsDefault()))); - additionalAttributes.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, sync.getDestinationDockerImage())); additionalAttributes .add(new MetricAttribute(MetricTags.DESTINATION_IMAGE_IS_DEFAULT, String.valueOf(sync.getDestinationDockerImageIsDefault()))); additionalAttributes.add(new MetricAttribute(MetricTags.WORKSPACE_ID, sync.getWorkspaceId().toString())); } else if (job.getConfigType() == REFRESH) { final var refresh = job.getConfig().getRefresh(); - additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, refresh.getSourceDockerImage())); - additionalAttributes.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, refresh.getDestinationDockerImage())); additionalAttributes.add(new MetricAttribute(MetricTags.WORKSPACE_ID, refresh.getWorkspaceId().toString())); } + additionalAttributes.addAll(imageAttrsFromJob(job)); emitToReleaseStagesMetricHelper(metric, job, additionalAttributes); } public void emitJobToReleaseStagesMetric(final OssMetricsRegistry metric, final Job job, final JobFailureRequest input) throws IOException { - List additionalAttributes = new ArrayList<>(); + final List additionalAttributes = new ArrayList<>(); if (job.getConfigType() == SYNC) { final var sync = job.getConfig().getSync(); - additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE, sync.getSourceDockerImage())); additionalAttributes.add(new MetricAttribute(MetricTags.SOURCE_IMAGE_IS_DEFAULT, String.valueOf(sync.getSourceDockerImageIsDefault()))); - additionalAttributes.add(new MetricAttribute(MetricTags.DESTINATION_IMAGE, sync.getDestinationDockerImage())); additionalAttributes .add(new MetricAttribute(MetricTags.DESTINATION_IMAGE_IS_DEFAULT, String.valueOf(sync.getDestinationDockerImageIsDefault()))); additionalAttributes.add(new MetricAttribute(MetricTags.WORKSPACE_ID, sync.getWorkspaceId().toString())); + additionalAttributes.addAll(imageAttrsFromJob(job)); job.getLastAttempt().flatMap(Attempt::getFailureSummary) .ifPresent(attemptFailureSummary -> { for (FailureReason failureReason : attemptFailureSummary.getFailures()) { @@ -340,7 +363,7 @@ public void trackCompletion(final Job job, final JobStatus status) throws IOExce jobTracker.trackSync(job, Enums.convertTo(status, JobState.class)); } - private void emitAttemptCompletedEventIfAttemptPresent(final Job job) throws IOException { + public void emitAttemptCompletedEventIfAttemptPresent(final Job job) { if (job == null) { return; } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/NotificationHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/NotificationHelper.java index 54c51d3dea8..ccc13675947 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/NotificationHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/NotificationHelper.java @@ -65,13 +65,12 @@ private SchemaUpdateNotification getSchemaUpdateNotification(final NotificationS final String sourceUrl = webUrlHelper.getSourceUrl(workspace.getWorkspaceId(), 
source.getSourceId()); final boolean isBreakingChange = applySchemaChangeHelper.containsBreakingChange(diff); - final SchemaUpdateNotification notification = SchemaUpdateNotification.builder() - .sourceInfo(SourceInfo.builder().name(source.getName()).id(source.getSourceId()).url(sourceUrl).build()) - .connectionInfo(ConnectionInfo.builder().name(connection.getName()).id(connection.getConnectionId()).url(connectionUrl).build()) - .workspace(WorkspaceInfo.builder().name(workspace.getName()).id(workspace.getWorkspaceId()).url(workspaceUrl).build()) - .catalogDiff(diff) - .isBreakingChange(isBreakingChange) - .build(); + final SchemaUpdateNotification notification = new SchemaUpdateNotification( + new WorkspaceInfo(workspace.getWorkspaceId(), workspace.getName(), workspaceUrl), + new ConnectionInfo(connection.getConnectionId(), connection.getName(), connectionUrl), + new SourceInfo(source.getSourceId(), source.getName(), sourceUrl), + isBreakingChange, + diff); return notification; } catch (final Exception e) { LOGGER.error("Failed to build notification {}: {}", workspace, e); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthHelper.java new file mode 100644 index 00000000000..0abd934f652 --- /dev/null +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthHelper.java @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.server.handlers.helpers; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.node.BooleanNode; +import com.fasterxml.jackson.databind.node.TextNode; +import io.airbyte.api.model.generated.CompleteOAuthResponse; +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.OAuthConfigSpecification; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Static helpers for Oauth-related reading and writing. + */ +public class OAuthHelper { + + private static final String PROPERTIES = "properties"; + private static final String PATH_IN_CONNECTOR_CONFIG = "path_in_connector_config"; + + /** + * Extract fields names from oauth spec. + * + * @param oauthSpec oauth spec + * @return field names in the spec + */ + public static Map> extractOauthConfigurationPaths(final JsonNode oauthSpec) { + + if (oauthSpec != null && oauthSpec.has(PROPERTIES) && oauthSpec.get(PROPERTIES).isObject()) { + final Map> result = new HashMap<>(); + + oauthSpec.get(PROPERTIES).fields().forEachRemaining(entry -> { + final JsonNode value = entry.getValue(); + if (value.isObject() && value.has(PATH_IN_CONNECTOR_CONFIG) && value.get(PATH_IN_CONNECTOR_CONFIG).isArray()) { + final List path = new ArrayList<>(); + for (final JsonNode pathPart : value.get(PATH_IN_CONNECTOR_CONFIG)) { + path.add(pathPart.textValue()); + } + result.put(entry.getKey(), path); + } + }); + + return result; + } else { + return new HashMap<>(); + } + } + + /** + * Map to the result of a completeOauth request to an API response. 
+ * + * @param input input + * @return complete oauth response + */ + public static CompleteOAuthResponse mapToCompleteOAuthResponse(final Map input) { + final CompleteOAuthResponse response = new CompleteOAuthResponse(); + response.setAuthPayload(new HashMap<>()); + + if (input.containsKey("request_succeeded")) { + response.setRequestSucceeded("true".equals(input.get("request_succeeded"))); + } else { + response.setRequestSucceeded(true); + } + + if (input.containsKey("request_error")) { + response.setRequestError(input.get("request_error").toString()); + } + + input.forEach((k, v) -> { + if (!"request_succeeded".equals(k) && !"request_error".equals(k)) { + response.getAuthPayload().put(k, v); + } + }); + + return response; + } + + /** + * Update the oauthUserInputFromConnectorConfigSpecification to allow for additional properties. The + * testing values must define the required values, but can send along additional fields from the + * testing values as well. TODO: Protocolize that this must always be set to true? + */ + public static void updateOauthConfigToAcceptAdditionalUserInputProperties( + final OAuthConfigSpecification oauthConfigSpecification) { + final JsonNode userInputNode = oauthConfigSpecification.getOauthUserInputFromConnectorConfigSpecification(); + final JsonNode updatedNode = Jsons.getNodeOrEmptyObject(userInputNode); + + Jsons.setNestedValue(updatedNode, List.of("type"), TextNode.valueOf("object")); + Jsons.setNestedValue(updatedNode, List.of("additionalProperties"), BooleanNode.TRUE); + + oauthConfigSpecification.setOauthUserInputFromConnectorConfigSpecification(updatedNode); + } + +} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractor.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractor.java deleted file mode 100644 index 21db39479b7..00000000000 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractor.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Extract paths to oauth fields from an oauth spec. - */ -public class OAuthPathExtractor { - - private static final String PROPERTIES = "properties"; - private static final String PATH_IN_CONNECTOR_CONFIG = "path_in_connector_config"; - - /** - * Extract fields names from oauth spec. 
- * - * @param oauthSpec oauth spec - * @return field names in the spec - */ - public static Map> extractOauthConfigurationPaths(final JsonNode oauthSpec) { - - if (oauthSpec != null && oauthSpec.has(PROPERTIES) && oauthSpec.get(PROPERTIES).isObject()) { - final Map> result = new HashMap<>(); - - oauthSpec.get(PROPERTIES).fields().forEachRemaining(entry -> { - final JsonNode value = entry.getValue(); - if (value.isObject() && value.has(PATH_IN_CONNECTOR_CONFIG) && value.get(PATH_IN_CONNECTOR_CONFIG).isArray()) { - final List path = new ArrayList<>(); - for (final JsonNode pathPart : value.get(PATH_IN_CONNECTOR_CONFIG)) { - path.add(pathPart.textValue()); - } - result.put(entry.getKey(), path); - } - }); - - return result; - } else { - return new HashMap<>(); - } - } - -} diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthSecretHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthSecretHelper.java index 666ca6aeff3..2beb01fd0fb 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthSecretHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/OAuthSecretHelper.java @@ -18,12 +18,10 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import lombok.extern.slf4j.Slf4j; /** * Holds helpers to handle OAuth secrets. */ -@Slf4j public class OAuthSecretHelper { /** @@ -56,7 +54,8 @@ public static JsonNode setSecretsInConnectionConfiguration(final ConnectorSpecif * path_in_connector_config i.e. { client_id: ['credentials', 'client_id']} */ @VisibleForTesting - public static Map> getAdvancedAuthOAuthPaths(final ConnectorSpecification connectorSpecification, boolean includeOutputPaths) + public static Map> getAdvancedAuthOAuthPaths(final ConnectorSpecification connectorSpecification, + final boolean includeOutputPaths) throws JsonValidationException { if (OAuthConfigSupplier.hasOAuthConfigSpecification(connectorSpecification)) { final JsonNode completeOAuthOutputSpecification = @@ -65,9 +64,9 @@ public static Map> getAdvancedAuthOAuthPaths(final Connecto connectorSpecification.getAdvancedAuth().getOauthConfigSpecification().getCompleteOauthServerOutputSpecification(); // Merge all the mappings into one map - Map> result = new HashMap<>(OAuthPathExtractor.extractOauthConfigurationPaths(completeOAuthServerOutputSpecification)); + final Map> result = new HashMap<>(OAuthHelper.extractOauthConfigurationPaths(completeOAuthServerOutputSpecification)); if (includeOutputPaths) { - result.putAll(OAuthPathExtractor.extractOauthConfigurationPaths(completeOAuthOutputSpecification)); + result.putAll(OAuthHelper.extractOauthConfigurationPaths(completeOAuthOutputSpecification)); } return result; } else { @@ -83,7 +82,7 @@ public static Map> getAdvancedAuthOAuthPaths(final Connecto * @param spec - connector specification to get paths for * @return Map where the key = the property and the value = the path to the property in list form. 
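Since the generic signatures render poorly in this diff, it may help to spell out the shape of the maps that OAuthHelper.extractOauthConfigurationPaths (used throughout OAuthSecretHelper here) returns: a map from field name to the path of that field inside the connector configuration. A small illustrative sketch follows; the spec fragment is invented, and Jsons.deserialize is assumed to be the existing Airbyte JSON helper:

import com.fasterxml.jackson.databind.JsonNode
import io.airbyte.commons.json.Jsons
import io.airbyte.commons.server.handlers.helpers.OAuthHelper

fun main() {
  // Invented complete_oauth_output_specification fragment: each property declares where the OAuth
  // flow's output should be written inside the connector configuration.
  val outputSpec: JsonNode =
    Jsons.deserialize(
      """
      {
        "type": "object",
        "properties": {
          "refresh_token": { "type": "string", "path_in_connector_config": ["credentials", "refresh_token"] },
          "client_id": { "type": "string", "path_in_connector_config": ["credentials", "client_id"] }
        }
      }
      """.trimIndent(),
    )

  val paths: Map<String, List<String>> = OAuthHelper.extractOauthConfigurationPaths(outputSpec)
  // paths == {refresh_token=[credentials, refresh_token], client_id=[credentials, client_id]}
  println(paths)
}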
*/ - public static Map> getOAuthConfigPaths(ConnectorSpecification spec) throws JsonValidationException { + public static Map> getOAuthConfigPaths(final ConnectorSpecification spec) throws JsonValidationException { if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { return getAdvancedAuthOAuthPaths(spec, true); } else { @@ -98,7 +97,7 @@ public static Map> getOAuthConfigPaths(ConnectorSpecificati * @param spec - connector specification to get paths for * @return Map where the key = the property and the value = the path to the property in list form. */ - public static Map> getOAuthInputPaths(ConnectorSpecification spec) throws JsonValidationException { + public static Map> getOAuthInputPaths(final ConnectorSpecification spec) throws JsonValidationException { if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { return getAdvancedAuthOAuthPaths(spec, false); } else { @@ -121,7 +120,7 @@ public static Map> getCompleteOauthServerOutputPaths(final connectorSpecification.getAdvancedAuth().getOauthConfigSpecification().getCompleteOauthServerOutputSpecification(); // Merge all the mappings into one map - return new HashMap<>(OAuthPathExtractor.extractOauthConfigurationPaths(completeOAuthServerOutputSpecification)); + return new HashMap<>(OAuthHelper.extractOauthConfigurationPaths(completeOAuthServerOutputSpecification)); } else { throw new JsonValidationException( String.format("Error parsing advancedAuth - see [%s]", connectorSpecification.getDocumentationUrl())); @@ -150,10 +149,10 @@ public static ConnectorSpecification validateOauthParamConfigAndReturnAdvancedAu final JsonNode oauthParamConfiguration) throws JsonValidationException { if (OAuthConfigSupplier.hasOAuthConfigSpecification(connectorSpecification)) { - JsonNode newConnectorSpecificationNode = Jsons.emptyObject(); - Map airbyteSecret = Map.of("airbyte_secret", true); + final JsonNode newConnectorSpecificationNode = Jsons.emptyObject(); + final Map airbyteSecret = Map.of("airbyte_secret", true); final Map> oauthPaths = OAuthSecretHelper.getCompleteOauthServerOutputPaths(connectorSpecification); - for (Entry> entry : oauthPaths.entrySet()) { + for (final Entry> entry : oauthPaths.entrySet()) { final List jsonPathList = entry.getValue(); if (Jsons.navigateTo(oauthParamConfiguration, jsonPathList) == null) { throw new BadObjectSchemaKnownException(String.format("Missing OAuth param for key at %s", jsonPathList)); @@ -177,9 +176,9 @@ public static ConnectorSpecification validateOauthParamConfigAndReturnAdvancedAu */ private static List alternatingList(final String property, final List list) { - List result = new ArrayList(list.size() * 2); + final List result = new ArrayList(list.size() * 2); - for (String item : list) { + for (final String item : list) { result.add(property); result.add(item); } @@ -194,7 +193,7 @@ public static void validateNoSecretsInConfiguration(final ConnectorSpecification final JsonNode connectionConfiguration) throws JsonValidationException { if (OAuthConfigSupplier.hasOAuthConfigSpecification(spec)) { - Map> oauthPaths = getOAuthInputPaths(spec); + final Map> oauthPaths = getOAuthInputPaths(spec); for (final Entry> entry : oauthPaths.entrySet()) { final String key = entry.getKey(); final List jsonPathList = entry.getValue(); @@ -204,11 +203,11 @@ public static void validateNoSecretsInConfiguration(final ConnectorSpecification } } - private static void throwIfKeyExistsInConfig(JsonNode connectionConfiguration, String key, List jsonPathList) { + private static void throwIfKeyExistsInConfig(final 
JsonNode connectionConfiguration, final String key, final List jsonPathList) { if (Jsons.navigateTo(connectionConfiguration, jsonPathList) != null) { // The API referenced by this message is a Cloud feature and not yet available in the open source // project but will be added. - String errorMessage = String.format( + final String errorMessage = String.format( "Cannot set key '%s', please create an OAuth credentials override instead - https://reference.airbyte.com/reference/workspaceoauthcredentials", key); throw new BadObjectSchemaKnownException(errorMessage); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/StatsAggregationHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/StatsAggregationHelper.java index 1f66076da0d..7a7b5e4bc06 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/StatsAggregationHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/StatsAggregationHelper.java @@ -20,6 +20,7 @@ import io.airbyte.persistence.job.JobPersistence.AttemptStats; import io.airbyte.persistence.job.JobPersistence.JobAttemptPair; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -27,15 +28,17 @@ import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class to aggregate stream stats. The class is meant to be used to aggregate stats for a * single stream across multiple attempts */ -@Slf4j public class StatsAggregationHelper { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + /** * WARNING! billing uses the stats that this method returns. Be careful when changing this method. * diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/TemporalEventRunner.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/TemporalEventRunner.java index 7591a827500..cbbc52564df 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/TemporalEventRunner.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduler/TemporalEventRunner.java @@ -12,16 +12,18 @@ import java.util.List; import java.util.Set; import java.util.UUID; -import lombok.AllArgsConstructor; /** * Temporal event client. For triggering events on connections. 
*/ -@AllArgsConstructor public class TemporalEventRunner implements EventRunner { private final TemporalClient temporalClient; + public TemporalEventRunner(TemporalClient temporalClient) { + this.temporalClient = temporalClient; + } + @Override @Trace public void createConnectionManagerWorkflow(final UUID connectionId) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AirbyteHttpRequestFieldExtractor.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AirbyteHttpRequestFieldExtractor.java index 7b131c75ab1..57a02d622ad 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AirbyteHttpRequestFieldExtractor.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AirbyteHttpRequestFieldExtractor.java @@ -8,17 +8,20 @@ import io.airbyte.commons.json.Jsons; import io.micronaut.core.util.StringUtils; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.util.Optional; import java.util.Set; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility class that facilitates the extraction of values from HTTP request POST bodies. */ @Singleton -@Slf4j public class AirbyteHttpRequestFieldExtractor { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + // For some APIs we asked for a list of ids, such as workspace IDs and connection IDs. We will // validate if user has permission // to all of them. diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java index ca50a04e5ad..848d202a04c 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthNettyServerCustomizer.java @@ -13,7 +13,6 @@ import io.netty.handler.codec.http.HttpObjectAggregator; import io.netty.handler.codec.http.HttpRequestDecoder; import jakarta.inject.Singleton; -import lombok.extern.slf4j.Slf4j; /** * Custom Netty customizer that registers the {@link AuthorizationServerHandler} with the Netty @@ -24,7 +23,6 @@ * determine authorization. */ @Singleton -@Slf4j public class AuthNettyServerCustomizer implements BeanCreatedEventListener { private final AuthorizationServerHandler authorizationServerHandler; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthenticationHeaderResolver.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthenticationHeaderResolver.java index 401fc159aec..dc9f5351a37 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthenticationHeaderResolver.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthenticationHeaderResolver.java @@ -33,6 +33,7 @@ import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -40,15 +41,17 @@ import java.util.Map; import java.util.Set; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Resolves organization or workspace IDs from HTTP headers. 
*/ -@Slf4j @Singleton public class AuthenticationHeaderResolver { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final WorkspaceHelper workspaceHelper; private final PermissionHandler permissionHandler; private final UserPersistence userPersistence; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthorizationServerHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthorizationServerHandler.java index a3ad53c4bca..430102baf1f 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthorizationServerHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthorizationServerHandler.java @@ -13,9 +13,11 @@ import io.netty.handler.codec.http.FullHttpRequest; import io.netty.handler.codec.http.HttpHeaders; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.nio.charset.StandardCharsets; import java.util.Optional; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Custom Netty {@link ChannelDuplexHandler} that intercepts all operations to ensure that headers @@ -23,9 +25,10 @@ */ @Singleton @Sharable -@Slf4j public class AuthorizationServerHandler extends ChannelDuplexHandler { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AirbyteHttpRequestFieldExtractor airbyteHttpRequestFieldExtractor; public AuthorizationServerHandler(final AirbyteHttpRequestFieldExtractor airbyteHttpRequestFieldExtractor) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/CommunityCurrentUserService.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/CommunityCurrentUserService.java index 5df2b5b7891..2e1b4984ab1 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/CommunityCurrentUserService.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/CommunityCurrentUserService.java @@ -7,7 +7,9 @@ import io.airbyte.config.AuthenticatedUser; import io.airbyte.config.persistence.UserPersistence; import io.micronaut.runtime.http.scope.RequestScope; -import lombok.extern.slf4j.Slf4j; +import java.lang.invoke.MethodHandles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implementation of {@link CurrentUserService} that uses the default user from the @@ -16,10 +18,11 @@ * `@RequestScope` means one bean is created per request, so the default user is cached for any * subsequent calls to getCurrentUser() within the same request. 
*/ -@Slf4j @RequestScope public class CommunityCurrentUserService implements CurrentUserService { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final UserPersistence userPersistence; private AuthenticatedUser retrievedDefaultUser; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/RbacRoleHelper.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/RbacRoleHelper.java index 8b4d4e20e74..5618c38e496 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/RbacRoleHelper.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/RbacRoleHelper.java @@ -16,6 +16,7 @@ import io.micronaut.http.HttpRequest; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.Collection; import java.util.Comparator; import java.util.HashSet; @@ -25,12 +26,14 @@ import java.util.Optional; import java.util.Set; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Singleton -@Slf4j public class RbacRoleHelper { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AuthenticationHeaderResolver headerResolver; private final PermissionPersistence permissionPersistence; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/SecurityAwareCurrentUserService.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/SecurityAwareCurrentUserService.java index cb16085d2c2..8177b85fe7f 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/SecurityAwareCurrentUserService.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/SecurityAwareCurrentUserService.java @@ -11,7 +11,9 @@ import io.micronaut.context.annotation.Requires; import io.micronaut.runtime.http.scope.RequestScope; import io.micronaut.security.utils.SecurityService; -import lombok.extern.slf4j.Slf4j; +import java.lang.invoke.MethodHandles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Interface for retrieving the current Airbyte User associated with the current request. 
Replaces @@ -23,9 +25,10 @@ @Requires(property = "micronaut.security.enabled", value = "true") @Replaces(CommunityCurrentUserService.class) -@Slf4j public class SecurityAwareCurrentUserService implements CurrentUserService { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final UserPersistence userPersistence; private final SecurityService securityService; private AuthenticatedUser retrievedCurrentUser; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/validation/CommunityActorDefinitionAccessValidator.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/validation/CommunityActorDefinitionAccessValidator.java index b46fa8d2278..4a1e6f0352d 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/validation/CommunityActorDefinitionAccessValidator.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/validation/CommunityActorDefinitionAccessValidator.java @@ -7,14 +7,12 @@ import io.airbyte.commons.server.errors.ApplicationErrorKnownException; import jakarta.inject.Singleton; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; /** * Default Community edition implementation of {@link ActorDefinitionAccessValidator}. Does nothing, * because Community edition does not have any access restrictions/auth. */ @Singleton -@Slf4j public class CommunityActorDefinitionAccessValidator implements ActorDefinitionAccessValidator { @Override diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/validation/EnterpriseActorDefinitionAccessValidator.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/validation/EnterpriseActorDefinitionAccessValidator.java index 942e6ac549a..0a71db3dea3 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/validation/EnterpriseActorDefinitionAccessValidator.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/validation/EnterpriseActorDefinitionAccessValidator.java @@ -15,7 +15,6 @@ import io.micronaut.security.utils.SecurityService; import jakarta.inject.Singleton; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; /** * Enterprise edition implementation of {@link ActorDefinitionAccessValidator}. Allows any @@ -27,7 +26,6 @@ */ @Singleton @RequiresAirbyteProEnabled -@Slf4j @SuppressWarnings({"PMD.PreserveStackTrace", "PMD.ExceptionAsFlowControl"}) @Replaces(CommunityActorDefinitionAccessValidator.class) public class EnterpriseActorDefinitionAccessValidator implements ActorDefinitionAccessValidator { diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/IdTypes.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/IdTypes.kt new file mode 100644 index 00000000000..89e07f1607f --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/IdTypes.kt @@ -0,0 +1,18 @@ +package io.airbyte.commons.server + +import java.util.UUID + +/** + * This file contains type-safe wrappers around UUIDs for various entities in the system. + * These are used to prevent bugs where the wrong UUID is passed to a function. 
+ */ + +@JvmInline +value class ConnectionId( + val value: UUID, +) + +@JvmInline +value class OrganizationId( + val value: UUID, +) diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelper.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelper.kt index e520b361bd5..17873d4da79 100644 --- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelper.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelper.kt @@ -5,6 +5,7 @@ import io.airbyte.api.model.generated.PermissionType import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest import io.airbyte.api.problems.model.generated.ProblemMessageData import io.airbyte.api.problems.throwable.generated.ForbiddenProblem +import io.airbyte.commons.annotation.InternalForTesting import io.airbyte.commons.auth.AuthRoleInterface import io.airbyte.commons.json.Jsons import io.airbyte.commons.server.handlers.PermissionHandler @@ -29,46 +30,12 @@ private val logger = KotlinLogging.logger {} * for any API endpoint that requires authorization and doesn't go through the CloudAuthenticationProvider. */ @Singleton -class ApiAuthorizationHelper( +open class ApiAuthorizationHelper( private val authorizationHeaderResolver: AuthenticationHeaderResolver, private val permissionHandler: PermissionHandler, private val currentUserService: CurrentUserService, private val rbacRoleHelper: RbacRoleHelper, ) { - private fun resolveIdsToWorkspaceIds( - ids: List, - scope: Scope, - ): List? { - val properties = - when (scope) { - Scope.WORKSPACE -> { - buildPropertiesMapForWorkspaces(ids) - } - Scope.WORKSPACES -> { - buildPropertiesMapForWorkspaces(ids) - } - Scope.SOURCE -> { - buildPropertiesMapForSource(ids.first()) - } - Scope.DESTINATION -> { - buildPropertiesMapForDestination(ids.first()) - } - Scope.CONNECTION -> { - buildPropertiesMapForConnection(ids.first()) - } - Scope.JOB -> { - buildPropertiesMapForJob(ids.first()) - } - Scope.ORGANIZATION -> { - throw ForbiddenProblem(ProblemMessageData().message("Cannot resolve organization Ids to workspace Ids.")) - } - Scope.PERMISSION -> { - buildPropertiesMapForPermission(ids.first()) - } - } - return authorizationHeaderResolver.resolveWorkspace(properties) - } - /** * Given a scoped ID, confirm that the current user has the given permission type. * @@ -79,12 +46,13 @@ class ApiAuthorizationHelper( * * @throws ForbiddenProblem - If the user does not have the required permissions */ - fun checkWorkspacePermissions( + fun checkWorkspacePermission( id: String, scope: Scope, - permissionTypes: Set, + userId: UUID, + permissionTypes: PermissionType, ) { - checkWorkspacePermissions(listOf(id), scope, currentUserService.currentUser.userId, permissionTypes) + checkWorkspacesPermissions(listOf(id), scope, userId, setOf(permissionTypes)) } /** @@ -98,32 +66,33 @@ class ApiAuthorizationHelper( * * @throws ForbiddenProblem - If the user does not have the required permissions */ - fun checkWorkspacePermissions( + fun checkWorkspacesPermission( ids: List, scope: Scope, userId: UUID, - permissionType: PermissionType, + permissionTypes: PermissionType, ) { - checkWorkspacePermissions(ids, scope, userId, setOf(permissionType)) + checkWorkspacesPermissions(ids, scope, userId, setOf(permissionTypes)) } /** - * Given a list of scoped IDs, confirm that the current user has the - * given workspace permission type. 
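The value classes in the new IdTypes.kt above are zero-cost at runtime (they inline to the underlying UUID) but give compile-time protection against swapping IDs. A brief illustrative sketch, where lookupConnection is a hypothetical consumer:

import io.airbyte.commons.server.ConnectionId
import io.airbyte.commons.server.OrganizationId
import java.util.UUID

// Hypothetical consumer: the signature states exactly which kind of ID it accepts.
fun lookupConnection(connectionId: ConnectionId): String = "connection ${connectionId.value}"

fun main() {
  val connectionId = ConnectionId(UUID.randomUUID())
  val organizationId = OrganizationId(UUID.randomUUID())

  lookupConnection(connectionId) // compiles
  // lookupConnection(organizationId)    // rejected at compile time: type mismatch
  // lookupConnection(UUID.randomUUID()) // rejected at compile time: a bare UUID is no longer accepted
}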
+ * Given a list of scoped IDs and a user ID, confirm that the indicated user + * has the given permission type. * * @param ids - The Ids we are checking permissions for * @param scope - The scope of the Ids - * @param permissionTypes - the set of permissions needed to access the resource(s). - * If the user has any of the permissions, the check will pass. + * @param userId - The ID of the user we are checking permissions for + * @param permissionTypes - the permission needed to access the resource(s) * * @throws ForbiddenProblem - If the user does not have the required permissions */ - fun checkWorkspacePermissions( - ids: List, + fun checkWorkspacesPermissions( + id: String, scope: Scope, + userId: UUID, permissionTypes: Set, ) { - checkWorkspacePermissions(ids, scope, currentUserService.currentUser.userId, permissionTypes) + checkWorkspacesPermissions(listOf(id), scope, userId, permissionTypes) } /** @@ -138,7 +107,8 @@ class ApiAuthorizationHelper( * * @throws ForbiddenProblem - If the user does not have the required permissions */ - fun checkWorkspacePermissions( + @InternalForTesting + internal fun checkWorkspacesPermissions( ids: List, scope: Scope, userId: UUID, @@ -230,71 +200,62 @@ class ApiAuthorizationHelper( } } + private fun resolveIdsToWorkspaceIds( + ids: List, + scope: Scope, + ): List? { + val properties = + when (scope) { + Scope.WORKSPACE -> buildPropertiesMapForWorkspaces(ids) + Scope.WORKSPACES -> buildPropertiesMapForWorkspaces(ids) + Scope.SOURCE -> buildPropertiesMapForSource(ids.first()) + Scope.DESTINATION -> buildPropertiesMapForDestination(ids.first()) + Scope.CONNECTION -> buildPropertiesMapForConnection(ids.first()) + Scope.JOB -> buildPropertiesMapForJob(ids.first()) + Scope.ORGANIZATION -> throw ForbiddenProblem(ProblemMessageData().message("Cannot resolve organization Ids to workspace Ids.")) + Scope.PERMISSION -> buildPropertiesMapForPermission(ids.first()) + } + + return authorizationHeaderResolver.resolveWorkspace(properties) + } + /** * Just resolves Ids to either a workspace or organization per the scope. 
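For reference, the renamed single-ID entry point above now takes the acting user explicitly rather than reading it from CurrentUserService. A hedged call-site sketch (the connection ID, user ID, and chosen permission are illustrative, and PermissionType.WORKSPACE_EDITOR is assumed to exist in the generated enum):

import io.airbyte.api.model.generated.PermissionType
import io.airbyte.commons.server.authorization.ApiAuthorizationHelper
import io.airbyte.commons.server.authorization.Scope
import java.util.UUID

// Illustrative guard: require that the given user is at least a workspace editor for the
// workspace that owns this connection; the helper resolves the connection ID to its workspace.
fun guardConnectionUpdate(
  apiAuthorizationHelper: ApiAuthorizationHelper,
  connectionId: UUID,
  userId: UUID,
) {
  apiAuthorizationHelper.checkWorkspacePermission(
    connectionId.toString(),
    Scope.CONNECTION,
    userId,
    PermissionType.WORKSPACE_EDITOR,
  )
}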
*/ private fun buildIdHeaderMap( ids: List, scope: Scope, - ): Map { - return when (scope) { - Scope.WORKSPACE -> { - buildPropertiesMapForWorkspaces(ids) - } - Scope.WORKSPACES -> { - buildPropertiesMapForWorkspaces(ids) - } - Scope.SOURCE -> { - buildPropertiesMapForSource(ids.first()) - } - Scope.DESTINATION -> { - buildPropertiesMapForDestination(ids.first()) - } - Scope.CONNECTION -> { - buildPropertiesMapForConnection(ids.first()) - } - Scope.JOB -> { - buildPropertiesMapForJob(ids.first()) - } - Scope.ORGANIZATION -> { - buildPropertiesMapForOrganization(ids.first()) - } - Scope.PERMISSION -> { - buildPropertiesMapForPermission(ids.first()) - } + ): Map = + when (scope) { + Scope.WORKSPACE -> buildPropertiesMapForWorkspaces(ids) + Scope.WORKSPACES -> buildPropertiesMapForWorkspaces(ids) + Scope.SOURCE -> buildPropertiesMapForSource(ids.first()) + Scope.DESTINATION -> buildPropertiesMapForDestination(ids.first()) + Scope.CONNECTION -> buildPropertiesMapForConnection(ids.first()) + Scope.JOB -> buildPropertiesMapForJob(ids.first()) + Scope.ORGANIZATION -> buildPropertiesMapForOrganization(ids.first()) + Scope.PERMISSION -> buildPropertiesMapForPermission(ids.first()) } - } - private fun buildPropertiesMapForPermission(id: String): Map { - return mapOf(Scope.PERMISSION.mappedHeaderProperty to id) - } + private fun buildPropertiesMapForPermission(id: String): Map = mapOf(Scope.PERMISSION.mappedHeaderProperty to id) - private fun buildPropertiesMapForOrganization(id: String): Map { - return mapOf(Scope.ORGANIZATION.mappedHeaderProperty to id) - } + private fun buildPropertiesMapForOrganization(id: String): Map = mapOf(Scope.ORGANIZATION.mappedHeaderProperty to id) - private fun buildPropertiesMapForConnection(id: String): Map { - return mapOf(Scope.CONNECTION.mappedHeaderProperty to id) - } + private fun buildPropertiesMapForConnection(id: String): Map = mapOf(Scope.CONNECTION.mappedHeaderProperty to id) - private fun buildPropertiesMapForSource(id: String): Map { - return mapOf(Scope.SOURCE.mappedHeaderProperty to id) - } + private fun buildPropertiesMapForSource(id: String): Map = mapOf(Scope.SOURCE.mappedHeaderProperty to id) - private fun buildPropertiesMapForDestination(id: String): Map { - return mapOf(Scope.DESTINATION.mappedHeaderProperty to id) - } + private fun buildPropertiesMapForDestination(id: String): Map = mapOf(Scope.DESTINATION.mappedHeaderProperty to id) - private fun buildPropertiesMapForWorkspaces(ids: List): Map { - return mapOf(Scope.WORKSPACES.mappedHeaderProperty to Jsons.serialize(ids)) - } + private fun buildPropertiesMapForWorkspaces(ids: List): Map = + mapOf(Scope.WORKSPACES.mappedHeaderProperty to Jsons.serialize(ids)) - private fun buildPropertiesMapForJob(id: String): Map { - return mapOf(Scope.JOB.mappedHeaderProperty to id) - } + private fun buildPropertiesMapForJob(id: String): Map = mapOf(Scope.JOB.mappedHeaderProperty to id) } -enum class Scope(val mappedHeaderProperty: String) { +enum class Scope( + val mappedHeaderProperty: String, +) { WORKSPACE(WORKSPACE_IDS_HEADER), WORKSPACES(WORKSPACE_IDS_HEADER), CONNECTION(CONNECTION_ID_HEADER), diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt index f5269704bc9..b57a49cf4c6 100644 --- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt +++ 
b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt @@ -15,6 +15,7 @@ import io.airbyte.api.model.generated.ConnectorRolloutRead import io.airbyte.api.model.generated.ConnectorRolloutRequestBody import io.airbyte.api.model.generated.ConnectorRolloutStartRequestBody import io.airbyte.api.model.generated.ConnectorRolloutState +import io.airbyte.api.model.generated.ConnectorRolloutStateTerminal import io.airbyte.api.model.generated.ConnectorRolloutStrategy import io.airbyte.api.model.generated.ConnectorRolloutUpdateStateRequestBody import io.airbyte.api.problems.model.generated.ProblemMessageData @@ -62,40 +63,49 @@ open class ConnectorRolloutHandler private val rolloutActorFinder: RolloutActorFinder, ) { @VisibleForTesting - open fun buildConnectorRolloutRead(connectorRollout: ConnectorRollout): ConnectorRolloutRead { + open fun buildConnectorRolloutRead( + connectorRollout: ConnectorRollout, + withActorSyncAndSelectionInfo: Boolean, + ): ConnectorRolloutRead { val rolloutStrategy = connectorRollout.rolloutStrategy?.let { ConnectorRolloutStrategy.fromValue(it.toString()) } val actorDefinitionVersion = actorDefinitionService.getActorDefinitionVersion(connectorRollout.releaseCandidateVersionId) - return ConnectorRolloutRead() - .id(connectorRollout.id) - .dockerRepository(actorDefinitionVersion.dockerRepository) - .dockerImageTag(actorDefinitionVersion.dockerImageTag) - .workflowRunId(connectorRollout.workflowRunId) - .actorDefinitionId(connectorRollout.actorDefinitionId) - .releaseCandidateVersionId(connectorRollout.releaseCandidateVersionId) - .initialVersionId(connectorRollout.initialVersionId) - .state(ConnectorRolloutState.fromString(connectorRollout.state.toString())) - .initialRolloutPct(connectorRollout.initialRolloutPct?.toInt()) - .currentTargetRolloutPct(connectorRollout.currentTargetRolloutPct?.toInt()) - .finalTargetRolloutPct(connectorRollout.finalTargetRolloutPct?.toInt()) - .hasBreakingChanges(connectorRollout.hasBreakingChanges) - .rolloutStrategy(rolloutStrategy) - .maxStepWaitTimeMins(connectorRollout.maxStepWaitTimeMins?.toInt()) - .updatedAt(connectorRollout.updatedAt?.let { unixTimestampToOffsetDateTime(it) }) - .createdAt(connectorRollout.createdAt?.let { unixTimestampToOffsetDateTime(it) }) - .expiresAt(connectorRollout.expiresAt?.let { unixTimestampToOffsetDateTime(it) }) - .errorMsg(connectorRollout.errorMsg) - .failedReason(connectorRollout.failedReason) - .updatedBy( - connectorRollout.rolloutStrategy?.let { strategy -> - connectorRollout.updatedBy?.let { updatedBy -> - getUpdatedBy(strategy, updatedBy) - } - }, - ).completedAt(connectorRollout.completedAt?.let { unixTimestampToOffsetDateTime(it) }) - .expiresAt(connectorRollout.expiresAt?.let { unixTimestampToOffsetDateTime(it) }) - .errorMsg(connectorRollout.errorMsg) - .failedReason(connectorRollout.failedReason) + var rollout = + ConnectorRolloutRead() + .id(connectorRollout.id) + .dockerRepository(actorDefinitionVersion.dockerRepository) + .dockerImageTag(actorDefinitionVersion.dockerImageTag) + .workflowRunId(connectorRollout.workflowRunId) + .actorDefinitionId(connectorRollout.actorDefinitionId) + .releaseCandidateVersionId(connectorRollout.releaseCandidateVersionId) + .initialVersionId(connectorRollout.initialVersionId) + .state(ConnectorRolloutState.fromString(connectorRollout.state.toString())) + .initialRolloutPct(connectorRollout.initialRolloutPct?.toInt()) + .currentTargetRolloutPct(connectorRollout.currentTargetRolloutPct?.toInt()) + 
.finalTargetRolloutPct(connectorRollout.finalTargetRolloutPct?.toInt()) + .hasBreakingChanges(connectorRollout.hasBreakingChanges) + .rolloutStrategy(rolloutStrategy) + .maxStepWaitTimeMins(connectorRollout.maxStepWaitTimeMins?.toInt()) + .updatedAt(connectorRollout.updatedAt?.let { unixTimestampToOffsetDateTime(it) }) + .createdAt(connectorRollout.createdAt?.let { unixTimestampToOffsetDateTime(it) }) + .expiresAt(connectorRollout.expiresAt?.let { unixTimestampToOffsetDateTime(it) }) + .errorMsg(connectorRollout.errorMsg) + .failedReason(connectorRollout.failedReason) + .updatedBy( + connectorRollout.rolloutStrategy?.let { strategy -> + connectorRollout.updatedBy?.let { updatedBy -> + getUpdatedBy(strategy, updatedBy) + } + }, + ).completedAt(connectorRollout.completedAt?.let { unixTimestampToOffsetDateTime(it) }) + .expiresAt(connectorRollout.expiresAt?.let { unixTimestampToOffsetDateTime(it) }) + .errorMsg(connectorRollout.errorMsg) + .failedReason(connectorRollout.failedReason) + + if (withActorSyncAndSelectionInfo) { + rollout = rollout.actorSelectionInfo(getPinnedActorInfo(connectorRollout.id)).actorSyncs(getActorSyncInfo(connectorRollout.id)) + } + return rollout } @VisibleForTesting @@ -130,6 +140,8 @@ open class ConnectorRolloutHandler dockerImageTag: String, updatedBy: UUID, rolloutStrategy: ConnectorRolloutStrategy, + initialRolloutPct: Int?, + finalTargetRolloutPct: Int?, ): ConnectorRollout { val actorDefinitionVersion = actorDefinitionService.getActorDefinitionVersion( @@ -175,6 +187,8 @@ open class ConnectorRolloutHandler .withState(ConnectorEnumRolloutState.INITIALIZED) .withHasBreakingChanges(false) .withRolloutStrategy(ConnectorEnumRolloutStrategy.fromValue(rolloutStrategy.toString())) + .withInitialRolloutPct(initialRolloutPct?.toLong()) + .withFinalTargetRolloutPct(finalTargetRolloutPct?.toLong()) connectorRolloutService.writeConnectorRollout(connectorRollout) return connectorRollout } @@ -206,19 +220,26 @@ open class ConnectorRolloutHandler @VisibleForTesting open fun getAndValidateStartRequest(connectorRolloutStart: ConnectorRolloutStartRequestBody): ConnectorRollout { - val connectorRollout = connectorRolloutService.getConnectorRollout(connectorRolloutStart.id) - if (connectorRollout.state != ConnectorEnumRolloutState.INITIALIZED) { - throw ConnectorRolloutInvalidRequestProblem( - ProblemMessageData().message( - "Connector rollout must be in INITIALIZED state to start the rollout, but was in state " + connectorRollout.state.toString(), - ), - ) - } + // We expect to hit this code path under 2 different circumstances: + // 1. When a rollout is being started for the first time + // 2. When a rollout's Temporal workflow is being reset, e.g. for a bug fix. + // In case 1, the rollout will be in INITIALIZED state, and we'll change the state to WORKFLOW_STARTED. + // In case 2, the rollout may be in any state, and we only want to change it to WORKFLOW_STARTED if it was INITIALIZED. + // However, in case 2 the workflow will have a new run ID, so we still want to update that. 
+ var connectorRollout = connectorRolloutService.getConnectorRollout(connectorRolloutStart.id) + if (connectorRollout.state == ConnectorEnumRolloutState.INITIALIZED) { + connectorRollout = + connectorRollout + .withState(ConnectorEnumRolloutState.WORKFLOW_STARTED) + .withRolloutStrategy(ConnectorEnumRolloutStrategy.fromValue(connectorRolloutStart.rolloutStrategy.toString())) + } + // Always update the workflow run ID if provided; if the workflow was restarted it will have changed + connectorRollout = + connectorRollout + .withWorkflowRunId(connectorRolloutStart.workflowRunId) + // Also include the version ID, for cases where the rollout wasn't automatically added to the rollouts table (i.e. for testing) + .withInitialVersionId(connectorRollout.initialVersionId) return connectorRollout - .withWorkflowRunId(connectorRolloutStart.workflowRunId) - .withInitialVersionId(connectorRollout.initialVersionId) - .withState(ConnectorEnumRolloutState.WORKFLOW_STARTED) - .withRolloutStrategy(ConnectorEnumRolloutStrategy.fromValue(connectorRolloutStart.rolloutStrategy.toString())) } @VisibleForTesting @@ -315,11 +336,17 @@ open class ConnectorRolloutHandler ), ) } + + // Unpin all actors that are pinned to the release candidate, unless the state is CANCELED and the user opted to retain pins on cancellation + // so that the same actors will be pinned to the next release candidate. + if (!(connectorRolloutFinalize.state == ConnectorRolloutStateTerminal.CANCELED && connectorRolloutFinalize.retainPinsOnCancellation)) { + actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion( + connectorRollout.actorDefinitionId, + connectorRollout.releaseCandidateVersionId, + ) + } + val currentTime = OffsetDateTime.now(ZoneOffset.UTC).toEpochSecond() - actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion( - connectorRollout.actorDefinitionId, - connectorRollout.releaseCandidateVersionId, - ) return connectorRollout .withState(ConnectorEnumRolloutState.fromValue(connectorRolloutFinalize.state.toString())) .withRolloutStrategy(ConnectorEnumRolloutStrategy.fromValue(connectorRolloutFinalize.rolloutStrategy.toString())) @@ -357,7 +384,7 @@ open class ConnectorRolloutHandler open fun listConnectorRollouts(): List { val connectorRollouts: List = connectorRolloutService.listConnectorRollouts() return connectorRollouts.map { connectorRollout -> - buildConnectorRolloutRead(connectorRollout) + buildConnectorRolloutRead(connectorRollout, false) } } @@ -365,7 +392,7 @@ open class ConnectorRolloutHandler val connectorRollouts: List = connectorRolloutService.listConnectorRollouts(actorDefinitionId) return connectorRollouts.map { connectorRollout -> - buildConnectorRolloutRead(connectorRollout) + buildConnectorRolloutRead(connectorRollout, false) } } @@ -383,34 +410,46 @@ open class ConnectorRolloutHandler actorDefinitionVersion.get().versionId, ) return connectorRollouts.map { connectorRollout -> - buildConnectorRolloutRead(connectorRollout) + buildConnectorRolloutRead(connectorRollout, false) } } @Transactional("config") open fun startConnectorRollout(connectorRolloutStart: ConnectorRolloutStartRequestBody): ConnectorRolloutRead { val connectorRollout = getAndValidateStartRequest(connectorRolloutStart) + + // If actors are still pinned to a previous rollout's release candidate, we migrate them to the new release candidate + if (connectorRolloutStart.migratePins) { + actorDefinitionVersionUpdater.migrateReleaseCandidatePins( + connectorRollout.actorDefinitionId, + 
connectorRolloutService.listConnectorRollouts(connectorRollout.actorDefinitionId).map { it.id.toString() }, + connectorRollout.id.toString(), + connectorRollout.releaseCandidateVersionId, + ) + connectorRollout.initialRolloutPct = getActorSelectionInfo(connectorRollout, 0).percentagePinned.toLong() + } + val updatedConnectorRollout = connectorRolloutService.writeConnectorRollout(connectorRollout) - return buildConnectorRolloutRead(updatedConnectorRollout) + return buildConnectorRolloutRead(updatedConnectorRollout, true) } @Transactional("config") open fun doConnectorRollout(connectorRolloutUpdate: ConnectorRolloutRequestBody): ConnectorRolloutRead { val connectorRollout = getAndRollOutConnectorRollout(connectorRolloutUpdate) val updatedConnectorRollout = connectorRolloutService.writeConnectorRollout(connectorRollout) - return buildConnectorRolloutRead(updatedConnectorRollout) + return buildConnectorRolloutRead(updatedConnectorRollout, true) } @Transactional("config") open fun finalizeConnectorRollout(connectorRolloutFinalize: ConnectorRolloutFinalizeRequestBody): ConnectorRolloutRead { val connectorRollout = getAndValidateFinalizeRequest(connectorRolloutFinalize) val updatedConnectorRollout = connectorRolloutService.writeConnectorRollout(connectorRollout) - return buildConnectorRolloutRead(updatedConnectorRollout) + return buildConnectorRolloutRead(updatedConnectorRollout, true) } open fun getConnectorRollout(id: UUID): ConnectorRolloutRead { val connectorRollout = connectorRolloutService.getConnectorRollout(id) - return buildConnectorRolloutRead(connectorRollout) + return buildConnectorRolloutRead(connectorRollout, true) } open fun updateState(connectorRolloutUpdateStateRequestBody: ConnectorRolloutUpdateStateRequestBody): ConnectorRolloutRead { @@ -431,7 +470,7 @@ open class ConnectorRolloutHandler connectorRolloutUpdateStateRequestBody.failedReason, ) val updatedConnectorRollout = connectorRolloutService.writeConnectorRollout(connectorRollout) - return buildConnectorRolloutRead(updatedConnectorRollout) + return buildConnectorRolloutRead(updatedConnectorRollout, true) } fun getActorSyncInfo(id: UUID): List { @@ -455,24 +494,34 @@ open class ConnectorRolloutHandler .nActorsEligibleOrAlreadyPinned(actorSelectionInfo.nActorsEligibleOrAlreadyPinned) } - open fun manualStartConnectorRollout(connectorRolloutWorkflowStart: ConnectorRolloutManualStartRequestBody): ConnectorRolloutRead { + open fun manualStartConnectorRollout(connectorRolloutManualStart: ConnectorRolloutManualStartRequestBody): ConnectorRolloutRead { val rollout = getOrCreateAndValidateManualStartInput( - connectorRolloutWorkflowStart.dockerRepository, - connectorRolloutWorkflowStart.actorDefinitionId, - connectorRolloutWorkflowStart.dockerImageTag, - connectorRolloutWorkflowStart.updatedBy, - connectorRolloutWorkflowStart.rolloutStrategy, + connectorRolloutManualStart.dockerRepository, + connectorRolloutManualStart.actorDefinitionId, + connectorRolloutManualStart.dockerImageTag, + connectorRolloutManualStart.updatedBy, + connectorRolloutManualStart.rolloutStrategy, + connectorRolloutManualStart.initialRolloutPct, + connectorRolloutManualStart.finalTargetRolloutPct, ) + try { connectorRolloutClient.startRollout( ConnectorRolloutActivityInputStart( - connectorRolloutWorkflowStart.dockerRepository, - connectorRolloutWorkflowStart.dockerImageTag, - connectorRolloutWorkflowStart.actorDefinitionId, + connectorRolloutManualStart.dockerRepository, + connectorRolloutManualStart.dockerImageTag, + 
connectorRolloutManualStart.actorDefinitionId, rollout.id, - connectorRolloutWorkflowStart.updatedBy, + connectorRolloutManualStart.updatedBy, rollout.rolloutStrategy, + actorDefinitionService.getActorDefinitionVersion(rollout.initialVersionId).dockerImageTag, + rollout, + getPinnedActorInfo(rollout.id), + getActorSyncInfo(rollout.id), + rollout.initialRolloutPct?.toInt(), + rollout.finalTargetRolloutPct?.toInt(), + connectorRolloutManualStart.migratePins, ), ) } catch (e: WorkflowUpdateException) { @@ -481,7 +530,7 @@ open class ConnectorRolloutHandler throw throwAirbyteApiClientExceptionIfExists("startWorkflow", e) } - return buildConnectorRolloutRead(connectorRolloutService.getConnectorRollout(rollout.id)) + return buildConnectorRolloutRead(connectorRolloutService.getConnectorRollout(rollout.id), false) } open fun manualDoConnectorRolloutUpdate(connectorRolloutUpdate: ConnectorRolloutManualRolloutRequestBody): ConnectorRolloutRead { @@ -496,6 +545,13 @@ open class ConnectorRolloutHandler connectorRolloutUpdate.id, connectorRolloutUpdate.updatedBy, getRolloutStrategyForManualUpdate(connectorRollout.rolloutStrategy), + actorDefinitionService.getActorDefinitionVersion(connectorRollout.initialVersionId).dockerImageTag, + connectorRollout, + getPinnedActorInfo(connectorRollout.id), + getActorSyncInfo(connectorRollout.id), + connectorRollout.initialRolloutPct.toInt(), + connectorRollout.finalTargetRolloutPct.toInt(), + connectorRolloutUpdate.migratePins, ), ) } catch (e: WorkflowUpdateException) { @@ -518,7 +574,7 @@ open class ConnectorRolloutHandler } catch (e: WorkflowUpdateException) { throw throwAirbyteApiClientExceptionIfExists("doRollout", e) } - return buildConnectorRolloutRead(connectorRolloutService.getConnectorRollout(connectorRolloutUpdate.id)) + return buildConnectorRolloutRead(connectorRolloutService.getConnectorRollout(connectorRolloutUpdate.id), false) } open fun manualFinalizeConnectorRollout( @@ -526,6 +582,7 @@ open class ConnectorRolloutHandler ): ConnectorRolloutManualFinalizeResponse { // Start a workflow if one doesn't exist val connectorRollout = connectorRolloutService.getConnectorRollout(connectorRolloutFinalize.id) + if (connectorRollout.state == ConnectorEnumRolloutState.INITIALIZED) { try { connectorRolloutClient.startRollout( @@ -536,6 +593,10 @@ open class ConnectorRolloutHandler connectorRolloutFinalize.id, connectorRolloutFinalize.updatedBy, getRolloutStrategyForManualUpdate(connectorRollout.rolloutStrategy), + actorDefinitionService.getActorDefinitionVersion(connectorRollout.initialVersionId).dockerImageTag, + connectorRollout, + getPinnedActorInfo(connectorRollout.id), + getActorSyncInfo(connectorRollout.id), ), ) } catch (e: WorkflowUpdateException) { @@ -561,6 +622,7 @@ open class ConnectorRolloutHandler connectorRolloutFinalize.failedReason, connectorRolloutFinalize.updatedBy, getRolloutStrategyForManualUpdate(connectorRollout.rolloutStrategy), + connectorRolloutFinalize.retainPinsOnCancellation, ), ) } catch (e: WorkflowUpdateException) { @@ -585,7 +647,7 @@ open class ConnectorRolloutHandler targetPercent: Int, ): ActorSelectionInfo { val actorSelectionInfo = rolloutActorFinder.getActorSelectionInfo(connectorRollout, targetPercent) - if (actorSelectionInfo.actorIdsToPin.isEmpty()) { + if (targetPercent > 0 && actorSelectionInfo.actorIdsToPin.isEmpty()) { throw ConnectorRolloutNotEnoughActorsProblem( ProblemMessageData().message( "No actors are eligible to be pinned for a progressive rollout.", diff --git 
a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/DiagnosticToolHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/DiagnosticToolHandler.kt index e9d28a5a6fb..8e6323d0d0b 100644 --- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/DiagnosticToolHandler.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/DiagnosticToolHandler.kt @@ -4,6 +4,8 @@ package io.airbyte.commons.server.handlers import io.airbyte.api.model.generated.ActorType +import io.airbyte.commons.csp.CspChecker +import io.airbyte.commons.yaml.Yamls import io.airbyte.config.DestinationConnection import io.airbyte.config.SourceConnection import io.airbyte.config.StandardSync @@ -14,7 +16,6 @@ import io.airbyte.data.services.ConnectionService import io.airbyte.data.services.DestinationService import io.airbyte.data.services.SourceService import io.airbyte.data.services.WorkspaceService -import io.fabric8.kubernetes.api.model.Container import io.fabric8.kubernetes.api.model.Quantity import io.fabric8.kubernetes.client.KubernetesClient import jakarta.inject.Singleton @@ -26,16 +27,16 @@ import java.io.FileOutputStream import java.io.IOException import java.time.Instant import java.util.UUID -import java.util.function.Consumer import java.util.stream.Stream import java.util.zip.ZipEntry import java.util.zip.ZipOutputStream -const val AIRBYTE_INSTANCE_YAML: String = "airbyte_instance.yaml" -const val AIRBYTE_DEPLOYMENT_YAML: String = "airbyte_deployment.yaml" -const val DIAGNOSTIC_REPORT_FILE_NAME: String = "diagnostic_report" -const val DIAGNOSTIC_REPORT_FILE_FORMAT: String = ".zip" -const val UNKNOWN: String = "Unknown" +internal const val AIRBYTE_INSTANCE_YAML = "airbyte_instance.yaml" +internal const val AIRBYTE_DEPLOYMENT_YAML = "airbyte_deployment.yaml" +internal const val AIRBYTE_CSP_CHECKS = "airbyte_csp_checks.yaml" +private const val DIAGNOSTIC_REPORT_FILE_NAME = "diagnostic_report" +private const val DIAGNOSTIC_REPORT_FILE_FORMAT = ".zip" +private const val UNKNOWN = "Unknown" /** * DiagnosticToolHandler. @@ -49,8 +50,9 @@ open class DiagnosticToolHandler( private val actorDefinitionVersionHelper: ActorDefinitionVersionHelper, private val instanceConfigurationHandler: InstanceConfigurationHandler, private val kubernetesClient: KubernetesClient, + private val cspChecker: CspChecker, ) { - private val yamlDumperOptions = DumperOptions().apply { defaultFlowStyle = DumperOptions.FlowStyle.BLOCK } + private val yaml = Yaml(DumperOptions().apply { defaultFlowStyle = DumperOptions.FlowStyle.BLOCK }) /** * Generate diagnostic report by collecting relevant data and zipping them into a single file. @@ -64,9 +66,7 @@ open class DiagnosticToolHandler( // Write the byte[] to a temporary file val tempFile = File.createTempFile(DIAGNOSTIC_REPORT_FILE_NAME, DIAGNOSTIC_REPORT_FILE_FORMAT) - FileOutputStream(tempFile).use { fos -> - fos.write(zipFileContent) - } + FileOutputStream(tempFile).use { it.write(zipFileContent) } // Return the temporary file tempFile } catch (e: IOException) { @@ -77,16 +77,22 @@ open class DiagnosticToolHandler( private fun generateZipInMemory(): ByteArray { val byteArrayOutputStream = ByteArrayOutputStream() val zipOut = ZipOutputStream(byteArrayOutputStream) - try { + + runCatching { addAirbyteInstanceYaml(zipOut) - } catch (e: Exception) { - logger.error { "Error in writing airbyte instance yaml. 
Message: ${e.message}" } + }.onFailure { + logger.error { "Error in writing airbyte instance yaml. Message: ${it.message}" } } - try { + runCatching { addAirbyteDeploymentYaml(zipOut) - } catch (e: IOException) { - logger.error { "Error in writing deployment yaml. Message: ${e.message}" } + }.onFailure { + logger.error { "Error in writing deployment yaml. Message: ${it.message}" } } + + runCatching { + addAirbyteCspChecks(zipOut) + }.onFailure { logger.error { "Error in writing csp-check yaml. Message: ${it.message}" } } + zipOut.finish() return byteArrayOutputStream.toByteArray() } @@ -111,10 +117,18 @@ open class DiagnosticToolHandler( "license" to collectLicenseInfo(), // TODO: Collect other information here, e.g: application logs, etc. ) - val yaml = Yaml(yamlDumperOptions) + return yaml.dump(airbyteInstanceYamlData) } + private fun addAirbyteCspChecks(zipOut: ZipOutputStream) { + ZipEntry(AIRBYTE_CSP_CHECKS).let { zipOut.putNextEntry(it) } + zipOut.write(generateAirbyteCspChecks().toByteArray()) + zipOut.closeEntry() + } + + private fun generateAirbyteCspChecks(): String = Yamls.serialize(cspChecker.check()) + private fun collectWorkspaceInfo(): List> = try { // get all workspaces @@ -128,8 +142,7 @@ open class DiagnosticToolHandler( "connections" to collectConnectionInfo(workspace.workspaceId), "connectors" to collectConnectorInfo(workspace.workspaceId), ) - } - .toList() + }.toList() } catch (e: IOException) { logger.error { "Error collecting workspace information. Message: ${e.message}}" } emptyList() @@ -149,8 +162,7 @@ open class DiagnosticToolHandler( "sourceId" to connection.sourceId.toString(), "destinationId" to connection.destinationId.toString(), ) - } - .toList() + }.toList() } catch (e: IOException) { logger.error { "Error collecting connection information. Message: ${e.message}" } emptyList() @@ -167,12 +179,11 @@ open class DiagnosticToolHandler( // TODO: isSourceActive feels like it could not throw and just return false if the config is not // found. try { - return@filter sourceService.isSourceActive(source.sourceId) + sourceService.isSourceActive(source.sourceId) } catch (e: IOException) { - return@filter false + false } - } - .map { source: SourceConnection -> + }.map { source: SourceConnection -> var sourceDefinitionVersion: ActorDefinitionVersionWithOverrideStatus? = null try { sourceDefinitionVersion = @@ -193,8 +204,7 @@ open class DiagnosticToolHandler( "connectorVersionOverrideApplied" to (sourceDefinitionVersion?.isOverrideApplied?.toString() ?: ""), "connectorSupportState" to (sourceDefinitionVersion?.actorDefinitionVersion?.supportState?.toString() ?: ""), ) - } - .toList() + }.toList() // get all destinations by workspaceId (only include active ones in the report) val destinations = @@ -204,12 +214,11 @@ open class DiagnosticToolHandler( // TODO: isDestinationActive feels like it could not throw and just return false if the config is // not found. try { - return@filter destinationService.isDestinationActive(destination.destinationId) + destinationService.isDestinationActive(destination.destinationId) } catch (e: IOException) { - return@filter false + false } - } - .map { destination: DestinationConnection -> + }.map { destination: DestinationConnection -> var destinationDefinitionVersion: ActorDefinitionVersionWithOverrideStatus? 
= null try { destinationDefinitionVersion = @@ -269,11 +278,7 @@ open class DiagnosticToolHandler( private fun generateDeploymentYaml(): String { // Collect cluster information - val deploymentYamlData = - mapOf( - "k8s" to collectK8sInfo(), - ) - val yaml = Yaml(yamlDumperOptions) + val deploymentYamlData = mapOf("k8s" to collectK8sInfo()) return yaml.dump(deploymentYamlData) } @@ -289,54 +294,74 @@ open class DiagnosticToolHandler( private fun collectNodeInfo(client: KubernetesClient): List> { logger.info { "Collecting nodes data..." } - val nodeList: MutableList> = ArrayList() - val nodes = client.nodes()?.list()?.items ?: emptyList() - for (node in nodes) { - val nodeInfo = - mapOf( - "name" to node.metadata.name, - "readyStatus" to ( + + val nodeList: List> = + client + .nodes() + ?.list() + ?.items + ?.map { node -> + val limits = Limits(node.status.allocatable) + val readyStatus = node.status.conditions .filter { it.type == "Ready" } .firstNotNullOfOrNull { it.status } ?: UNKNOWN - ), - "cpu" to (node.status.allocatable["cpu"]?.let { it.amount.toString() + it.format } ?: UNKNOWN), - "memory" to (node.status.allocatable["memory"]?.let { it.amount.toString() + it.format } ?: UNKNOWN), - ) - nodeList.add(nodeInfo) - } + + mapOf( + "name" to node.metadata.name, + "readyStatus" to readyStatus, + "cpu" to limits.cpu, + "memory" to limits.memory, + ) + }?.toList() ?: emptyList() + return nodeList } private fun collectPodInfo(client: KubernetesClient): List> { logger.info { "Collecting pods data..." } - val podList: MutableList> = ArrayList() - val pods = client.pods()?.inNamespace("ab")?.list()?.items ?: emptyList() - for (pod in pods) { - val podInfo: MutableMap = HashMap() - podInfo["name"] = pod.metadata.name - podInfo["status"] = pod.status.phase - val containerLimits: MutableList> = ArrayList() - pod.spec.containers.forEach( - Consumer { container: Container -> - val containerLimit: MutableMap = HashMap() - containerLimit["containerName"] = container.name - val limit = getContainerResourceLimit(container) - containerLimit["cpu"] = limit?.get("cpu")?.let { it.amount.toString() + it.format.toString() } ?: UNKNOWN - containerLimit["memory"] = limit?.get("memory")?.let { it.amount.toString() + it.format.toString() } ?: UNKNOWN - containerLimits.add(containerLimit) - }, - ) - podInfo["limits"] = containerLimits - podList.add(podInfo) - } + val pods = + client + .pods() + ?.inNamespace("ab") + ?.list() + ?.items ?: emptyList() + + val podList: List> = + pods + .map { pod -> + val podInfo = + mutableMapOf( + "name" to pod.metadata.name, + "status" to pod.status.phase, + ) + + val containerLimits: List> = + pod.spec.containers + .map { container -> + val limits = Limits(container.resources?.limits) + + mapOf( + "containerName" to container.name, + "cpu" to limits.cpu, + "memory" to limits.memory, + ) + }.toList() + + podInfo["limits"] = containerLimits + podInfo + }.toList() + return podList } +} - private fun getContainerResourceLimit(container: Container): Map? { - if (container.resources == null || container.resources.limits == null) { - return null - } - return container.resources.limits - } +private data class Limits( + val cpu: String = UNKNOWN, + val memory: String = UNKNOWN, +) { + constructor(limits: Map?) 
: this( + cpu = limits?.get("cpu")?.let { it.amount.toString() + it.format.toString() } ?: UNKNOWN, + memory = limits?.get("memory")?.let { it.amount.toString() + it.format.toString() } ?: UNKNOWN, + ) } diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt index 57df52321b9..c67029ddc1c 100644 --- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandler.kt @@ -21,10 +21,7 @@ import io.airbyte.data.services.OrganizationService import io.airbyte.data.services.PermissionRedundantException import io.airbyte.data.services.PermissionService import io.airbyte.data.services.WorkspaceService -import io.airbyte.featureflag.BillingInArrearsForNewSignups -import io.airbyte.featureflag.EmailAttribute import io.airbyte.featureflag.FeatureFlagClient -import io.airbyte.featureflag.User import io.github.oshai.kotlinlogging.KotlinLogging import jakarta.inject.Named import jakarta.inject.Singleton @@ -92,27 +89,22 @@ open class ResourceBootstrapHandler( fun findOrCreateOrganizationAndPermission(user: AuthenticatedUser): Organization { findExistingOrganization(user)?.let { return it } - val billingInArrears = featureFlagClient.boolVariation(BillingInArrearsForNewSignups, User(user.userId, EmailAttribute(user.email))) val organization = Organization().apply { this.organizationId = uuidSupplier.get() this.userId = user.userId this.name = getDefaultOrganizationName(user) this.email = user.email - this.orgLevelBilling = billingInArrears - this.pba = false } organizationService.writeOrganization(organization) - if (billingInArrears) { - logger.info { "Creating organization ${organization.organizationId} with billing in arrears enabled" } - val paymentConfig = - OrganizationPaymentConfig() - .withOrganizationId(organization.organizationId) - .withPaymentStatus(OrganizationPaymentConfig.PaymentStatus.UNINITIALIZED) + val paymentConfig = + OrganizationPaymentConfig() + .withOrganizationId(organization.organizationId) + .withPaymentStatus(OrganizationPaymentConfig.PaymentStatus.UNINITIALIZED) + .withSubscriptionStatus(OrganizationPaymentConfig.SubscriptionStatus.PRE_SUBSCRIPTION) - organizationPaymentConfigService.savePaymentConfig(paymentConfig) - } + organizationPaymentConfigService.savePaymentConfig(paymentConfig) val organizationPermission = buildDefaultOrganizationPermission(user.userId, organization.organizationId) permissionService.createPermission(organizationPermission) @@ -151,29 +143,27 @@ open class ResourceBootstrapHandler( private fun buildDefaultWorkspacePermission( userId: UUID, workspaceId: UUID, - ): Permission { - return Permission().apply { + ): Permission = + Permission().apply { this.userId = userId this.workspaceId = workspaceId this.permissionType = DEFAULT_WORKSPACE_PERMISSION_TYPE this.permissionId = uuidSupplier.get() } - } private fun buildDefaultOrganizationPermission( userId: UUID, organizationId: UUID, - ): Permission { - return Permission().apply { + ): Permission = + Permission().apply { this.userId = userId this.organizationId = organizationId this.permissionType = DEFAULT_ORGANIZATION_PERMISSION_TYPE this.permissionId = uuidSupplier.get() } - } - private fun getDefaultOrganizationName(user: AuthenticatedUser): String { - return when { + private fun 
getDefaultOrganizationName(user: AuthenticatedUser): String = + when { user.companyName != null -> { "${user.companyName}'s Organization" } @@ -186,5 +176,4 @@ open class ResourceBootstrapHandler( "${user.email.split("@").first()}'s Organization" } } - } } diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/MapperSecretHelper.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/MapperSecretHelper.kt new file mode 100644 index 00000000000..0f1c80d5f08 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/helpers/MapperSecretHelper.kt @@ -0,0 +1,276 @@ +package io.airbyte.commons.server.handlers.helpers + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.api.problems.model.generated.ProblemMapperIdData +import io.airbyte.api.problems.throwable.generated.MapperSecretNotFoundProblem +import io.airbyte.api.problems.throwable.generated.RuntimeSecretsManagerRequiredProblem +import io.airbyte.commons.constants.AirbyteSecretConstants +import io.airbyte.commons.json.Jsons +import io.airbyte.config.Configs.DeploymentMode +import io.airbyte.config.ConfiguredAirbyteCatalog +import io.airbyte.config.ConfiguredAirbyteStream +import io.airbyte.config.ConfiguredMapper +import io.airbyte.config.MapperConfig +import io.airbyte.config.ScopeType +import io.airbyte.config.StreamDescriptor +import io.airbyte.config.secrets.JsonSecretsProcessor +import io.airbyte.config.secrets.SecretsHelpers +import io.airbyte.config.secrets.SecretsRepositoryReader +import io.airbyte.config.secrets.SecretsRepositoryWriter +import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence +import io.airbyte.data.services.SecretPersistenceConfigService +import io.airbyte.data.services.WorkspaceService +import io.airbyte.featureflag.AllowMappersDefaultSecretPersistence +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.Organization +import io.airbyte.featureflag.UseRuntimeSecretPersistence +import io.airbyte.mappers.transformations.Mapper +import io.airbyte.mappers.transformations.MapperSpec +import jakarta.inject.Inject +import jakarta.inject.Named +import jakarta.inject.Singleton +import secrets.persistence.SecretCoordinateException +import java.util.UUID + +@Singleton +class MapperSecretHelper( + private val mappers: Map>, + private val workspaceService: WorkspaceService, + private val secretPersistenceConfigService: SecretPersistenceConfigService, + private val secretsRepositoryWriter: SecretsRepositoryWriter, + private val secretsRepositoryReader: SecretsRepositoryReader, + @Named("jsonSecretsProcessorWithCopy") private val secretsProcessor: JsonSecretsProcessor, + private val featureFlagClient: FeatureFlagClient, + private val deploymentMode: DeploymentMode, +) { + @Inject + constructor( + mappers: List>, + workspaceService: WorkspaceService, + secretPersistenceConfigService: SecretPersistenceConfigService, + secretsRepositoryWriter: SecretsRepositoryWriter, + secretsRepositoryReader: SecretsRepositoryReader, + @Named("jsonSecretsProcessorWithCopy") secretsProcessor: JsonSecretsProcessor, + featureFlagClient: FeatureFlagClient, + deploymentMode: DeploymentMode, + ) : this( + mappers.associateBy { it.name }, + workspaceService, + secretPersistenceConfigService, + secretsRepositoryWriter, + secretsRepositoryReader, + secretsProcessor, + featureFlagClient, + deploymentMode, + ) + + private fun getMapper(name: String): Mapper { + return mappers[name] ?: throw 
IllegalArgumentException("Mapper $name not found") + } + + private fun specHasSecrets(spec: JsonNode): Boolean { + return SecretsHelpers.getSortedSecretPaths(spec).isNotEmpty() + } + + private fun getSecretPersistence(organizationId: UUID): RuntimeSecretPersistence? { + val isRuntimePersistenceEnabled = featureFlagClient.boolVariation(UseRuntimeSecretPersistence, Organization(organizationId)) + if (!isRuntimePersistenceEnabled) { + if (deploymentMode == DeploymentMode.CLOUD && + !featureFlagClient.boolVariation( + AllowMappersDefaultSecretPersistence, + Organization(organizationId), + ) + ) { + throw RuntimeSecretsManagerRequiredProblem() + } + return null + } + val secretPersistenceConfig = secretPersistenceConfigService.get(ScopeType.ORGANIZATION, organizationId) + return RuntimeSecretPersistence(secretPersistenceConfig) + } + + private fun assertConfigHasNoMaskedSecrets( + config: JsonNode, + mapperId: UUID?, + mapperType: String, + ) { + val configAsString = Jsons.serialize(config) + if (configAsString.contains(AirbyteSecretConstants.SECRETS_MASK)) { + throw MapperSecretNotFoundProblem(ProblemMapperIdData().mapperId(mapperId).mapperType(mapperType)) + } + } + + private fun handleMapperConfigSecrets( + mapperConfig: MapperConfig, + workspaceId: UUID, + organizationId: UUID, + ): MapperConfig { + return handleMapperConfigSecrets(mapperConfig, existingMapperConfig = null, workspaceId, organizationId) + } + + private fun handleMapperConfigSecrets( + mapperConfig: MapperConfig, + existingMapperConfig: MapperConfig?, + workspaceId: UUID, + organizationId: UUID, + ): MapperConfig { + val mapperName = mapperConfig.name() + val mapperInstance = getMapper(mapperName) + val mapperConfigSchema = getConfigSchema(mapperInstance.spec()) + + if (!specHasSecrets(mapperConfigSchema)) { + // Nothing to do, no secrets in spec + return mapperConfig + } + + val secretPersistence = getSecretPersistence(organizationId) + val persistedConfigAsJson = existingMapperConfig?.let { Jsons.jsonNode(it.config()) } + val hydratedPersistedConfig = tryHydrateConfigJson(persistedConfigAsJson, secretPersistence) + + val newConfigJson = + if (hydratedPersistedConfig != null) { + // copy any necessary secrets from the current mapper to the incoming updated mapper + val configWithSecrets = + secretsProcessor.copySecrets( + hydratedPersistedConfig, + Jsons.jsonNode(mapperConfig.config()), + mapperConfigSchema, + ) + secretsRepositoryWriter.updateFromConfig( + workspaceId, + persistedConfigAsJson!!, + configWithSecrets, + mapperConfigSchema, + secretPersistence, + ) + } else { + val configWithSecrets = Jsons.jsonNode(mapperConfig.config()) + assertConfigHasNoMaskedSecrets(configWithSecrets, mapperConfig.id(), mapperName) + secretsRepositoryWriter.createFromConfig( + workspaceId, + configWithSecrets, + mapperConfigSchema, + secretPersistence, + ) + } + + return mapperInstance.spec().deserialize(ConfiguredMapper(mapperName, newConfigJson, mapperConfig.id())) + } + + /** + * Detects secrets in mapper configurations and writes them to the secrets store. It returns the + * connection configuration stripped of secrets (replaced with pointers to the secrets store). 
+ */ + fun createAndReplaceMapperSecrets( + workspaceId: UUID, + catalog: ConfiguredAirbyteCatalog, + ): ConfiguredAirbyteCatalog { + val organizationId = workspaceService.getOrganizationIdFromWorkspaceId(workspaceId).get() + return catalog.copy( + streams = + catalog.streams.map { stream -> + stream.copy( + mappers = + stream.mappers.map { + handleMapperConfigSecrets(it, workspaceId, organizationId) + }, + ) + }, + ) + } + + private fun getStreamUpdatedMappers( + stream: ConfiguredAirbyteStream, + oldStream: ConfiguredAirbyteStream?, + workspaceId: UUID, + organizationId: UUID, + ): List { + val oldMappersById = oldStream?.mappers?.filter { it.id() != null }?.associateBy { it.id() } ?: emptyMap() + return stream.mappers.map { + val existingConfig = it.id().let { id -> oldMappersById[id] } + handleMapperConfigSecrets(it, existingConfig, workspaceId, organizationId) + } + } + + fun updateAndReplaceMapperSecrets( + workspaceId: UUID, + oldCatalog: ConfiguredAirbyteCatalog, + newCatalog: ConfiguredAirbyteCatalog, + ): ConfiguredAirbyteCatalog { + val organizationId = workspaceService.getOrganizationIdFromWorkspaceId(workspaceId).get() + val oldStreams = oldCatalog.streams.associateBy { StreamDescriptor().withName(it.stream.name).withNamespace(it.stream.namespace) } + return newCatalog.copy( + streams = + newCatalog.streams.map { stream -> + val streamDescriptor = + StreamDescriptor() + .withName(stream.stream.name) + .withNamespace(stream.stream.namespace) + val oldStream = oldStreams[streamDescriptor] + stream.copy( + mappers = getStreamUpdatedMappers(stream, oldStream, workspaceId, organizationId), + ) + }, + ) + } + + private fun getConfigSchema(mapperSpec: MapperSpec<*>): JsonNode { + val mapperSpecSchema = mapperSpec.jsonSchema() + if (!mapperSpecSchema.has("properties") || !mapperSpecSchema.get("properties").has("config")) { + throw IllegalStateException("Mapper spec schema does not have a config property") + } + return mapperSpecSchema.get("properties").get("config") + } + + private fun maskMapperConfigSecrets(mapperConfig: MapperConfig): MapperConfig { + val mapperName = mapperConfig.name() + val mapperInstance = getMapper(mapperName) + val mapperConfigSchema = getConfigSchema(mapperInstance.spec()) + val maskedConfig = secretsProcessor.prepareSecretsForOutput(Jsons.jsonNode(mapperConfig.config()), mapperConfigSchema) + return mapperInstance.spec().deserialize(ConfiguredMapper(mapperName, maskedConfig, mapperConfig.id())) + } + + private fun maskMapperSecretsForStream(stream: ConfiguredAirbyteStream): ConfiguredAirbyteStream { + return stream.copy( + mappers = + stream.mappers.map { + maskMapperConfigSecrets(it) + }, + ) + } + + /** + * Given a catalog with mapper configurations, mask the secrets in the configurations. + */ + fun maskMapperSecrets(catalog: ConfiguredAirbyteCatalog): ConfiguredAirbyteCatalog { + return catalog.copy( + streams = + catalog.streams.map { + maskMapperSecretsForStream(it) + }, + ) + } + + private fun tryHydrateConfigJson( + persistedConfigJson: JsonNode?, + runtimeSecretPersistence: RuntimeSecretPersistence?, + ): JsonNode? 
{ + if (persistedConfigJson == null) { + return null + } + + return try { + if (runtimeSecretPersistence != null) { + secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence( + persistedConfigJson, + runtimeSecretPersistence, + ) + } else { + secretsRepositoryReader.hydrateConfigFromDefaultSecretPersistence(persistedConfigJson) + } + } catch (e: SecretCoordinateException) { + // Some secret is missing, treat as a new config + null + } + } +} diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/helpers/CronExpressionHelper.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/helpers/CronExpressionHelper.kt new file mode 100644 index 00000000000..4118ee240d9 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/helpers/CronExpressionHelper.kt @@ -0,0 +1,72 @@ +package io.airbyte.commons.server.helpers + +import com.cronutils.descriptor.CronDescriptor +import com.cronutils.model.Cron +import com.cronutils.model.CronType +import com.cronutils.model.definition.CronDefinitionBuilder +import com.cronutils.model.time.ExecutionTime +import com.cronutils.parser.CronParser +import jakarta.inject.Singleton +import java.time.ZonedDateTime +import java.util.Locale + +@Singleton +class CronExpressionHelper { + fun validateCronExpression(cronExpression: String): Cron { + val cronDefinition = CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ) + val cron = CronParser(cronDefinition).parse(cronExpression) + + return try { + cron.validate() + // Airbyte never accepts cron expressions that execute more than once per minute + checkDoesNotExecuteMoreThanOncePerMinute(cron) + cron + } catch (e: IllegalArgumentException) { + throw IllegalArgumentException("Invalid cron expression: ${e.message}") + } + } + + fun describeCronExpression(cron: Cron): String = CronDescriptor.instance(Locale.ENGLISH).describe(cron) + + fun getNextExecutions( + cron: Cron, + numberOfExecutions: Int, + ): List { + val executionTime = ExecutionTime.forCron(cron) + val nextExecutions = mutableListOf() + var nextExecution = ZonedDateTime.now() + + for (i in 1..numberOfExecutions) { + nextExecution = executionTime.nextExecution(nextExecution).orElse(null) ?: break + nextExecutions.add(nextExecution.toEpochSecond()) + } + + return nextExecutions + } + + fun checkDoesNotExecuteMoreThanOncePerHour(cron: Cron) { + val nextExecutions = getNextExecutions(cron, 3) + // Make sure the time difference between the next 3 executions does not exceed 1 hour + + nextExecutions.zipWithNext { prev, next -> + if (next - prev < 3600) { + throw IllegalArgumentException( + "Cron executions must be more than 1 hour apart", + ) + } + } + } + + fun checkDoesNotExecuteMoreThanOncePerMinute(cron: Cron) { + val nextExecutions = getNextExecutions(cron, 3) + // Make sure the time difference between the next 3 executions does not exceed 1 minute + + nextExecutions.zipWithNext { prev, next -> + if (next - prev < 60) { + throw IllegalArgumentException( + "Cron executions must be more than 1 minute apart", + ) + } + } + } +} diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/services/ConnectionService.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/services/ConnectionService.kt new file mode 100644 index 00000000000..5f4a8a8664b --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/services/ConnectionService.kt @@ -0,0 +1,194 @@ +package io.airbyte.commons.server.services + +import 
io.airbyte.api.model.generated.ConnectionStatus +import io.airbyte.commons.server.ConnectionId +import io.airbyte.commons.server.handlers.helpers.ConnectionTimelineEventHelper +import io.airbyte.commons.server.scheduler.EventRunner +import io.airbyte.config.Job +import io.airbyte.config.JobStatus +import io.airbyte.config.StandardSync +import io.airbyte.data.services.shared.ConnectionAutoDisabledReason +import io.airbyte.persistence.job.JobNotifier +import io.airbyte.persistence.job.JobPersistence +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.context.annotation.Value +import io.micronaut.transaction.annotation.Transactional +import jakarta.inject.Singleton +import java.time.Duration +import java.time.Instant +import java.util.UUID +import kotlin.jvm.optionals.getOrNull +import io.airbyte.data.services.ConnectionService as ConnectionRepository +import io.airbyte.data.services.JobService as JobRepository + +private val logger = KotlinLogging.logger {} + +/** + * Application service for performing business logic related to connections. + */ +interface ConnectionService { + /** + * Disable connections and record a timeline event for each. + * If connections are disabled by an automatic process, the auto-disabled reason should be + * provided so that an appropriate timeline event can be recorded. + * + * @return the set of connection IDs that were disabled + */ + fun disableConnections( + connectionIds: Set, + autoDisabledReason: ConnectionAutoDisabledReason?, + ): Set + + /** + * Send a warning and/or disable a connection if it has too many failed jobs in a row and no + * successful jobs within the configured time frame. + * + * @return true if the connection was disabled, false otherwise + */ + fun warnOrDisableForConsecutiveFailures( + connectionId: ConnectionId, + timestamp: Instant, + ): Boolean + + @Deprecated("Use ConnectionId version instead, removable once Java callers are converted to Kotlin") + fun warnOrDisableForConsecutiveFailures( + connectionId: UUID, + timestamp: Instant, + ): Boolean +} + +@Singleton +open class ConnectionServiceImpl( + private val connectionRepository: ConnectionRepository, + private val connectionTimelineEventHelper: ConnectionTimelineEventHelper, + private val warnOrDisableHelper: WarnOrDisableConnectionHelper, + private val eventRunner: EventRunner, +) : ConnectionService { + @Transactional("config") + override fun disableConnections( + connectionIds: Set, + autoDisabledReason: ConnectionAutoDisabledReason?, + ): Set { + val disabledConnectionIds = connectionRepository.disableConnectionsById(connectionIds.toList().map(ConnectionId::value)) + disabledConnectionIds.forEach { connectionId -> + connectionTimelineEventHelper.logStatusChangedEventInConnectionTimeline( + connectionId, + ConnectionStatus.INACTIVE, + autoDisabledReason?.name, + autoDisabledReason != null, + ) + eventRunner.update(connectionId) + } + return disabledConnectionIds.map(::ConnectionId).toSet() + } + + override fun warnOrDisableForConsecutiveFailures( + connectionId: ConnectionId, + timestamp: Instant, + ): Boolean = warnOrDisableHelper.warnOrDisable(this, connectionId, timestamp) + + @Deprecated("Use ConnectionId version instead, removable once Java callers are converted to Kotlin") + override fun warnOrDisableForConsecutiveFailures( + connectionId: UUID, + timestamp: Instant, + ): Boolean = warnOrDisableForConsecutiveFailures(ConnectionId(connectionId), timestamp) +} + +/** + * Helper class for warning or disabling a connection based on the number of 
consecutive failed jobs. + * Extracted into its own class to avoid overcrowding the ConnectionServiceImpl with dependencies + * and configurations that are only needed for this specific use case. + */ +@Singleton +class WarnOrDisableConnectionHelper( + private val connectionRepository: ConnectionRepository, + private val jobService: JobRepository, + private val jobPersistence: JobPersistence, + private val jobNotifier: JobNotifier, + @Value("\${airbyte.server.connection.limits.max-days}") private val maxDaysOfOnlyFailedJobsBeforeConnectionDisable: Int, + @Value("\${airbyte.server.connection.limits.max-jobs}") private val maxFailedJobsInARowBeforeConnectionDisable: Int, + @Value("\${airbyte.server.connection.limits.max-days-warning}") private val maxDaysOfOnlyFailedJobsBeforeConnectionWarning: Int, + @Value("\${airbyte.server.connection.limits.max-jobs-warning}") private val maxFailedJobsInARowBeforeConnectionWarning: Int, +) { + fun warnOrDisable( + connectionService: ConnectionService, + connectionId: ConnectionId, + timestamp: Instant, + ): Boolean { + val firstJob = jobPersistence.getFirstReplicationJob(connectionId.value).getOrNull() + val mostRecentJob = jobPersistence.getLastReplicationJob(connectionId.value).getOrNull() + + if (firstJob == null || mostRecentJob == null) { + logger.error { "No replication job has been run." } + return false + } + + if (mostRecentJob.status != JobStatus.FAILED) { + logger.error { "Most recent job with ID ${mostRecentJob.id} is not failed." } + return false + } + + val priorFailedJob = jobService.getPriorJobWithStatusForScopeAndJobId(connectionId.value.toString(), mostRecentJob.id, JobStatus.FAILED) + val standardSync = connectionRepository.getStandardSync(connectionId.value) + val lastSuccessfulJob = jobService.lastSuccessfulJobForScope(connectionId.value.toString()) + + if (standardSync.status == StandardSync.Status.INACTIVE) { + logger.info { "Connection with ID $connectionId is already disabled." 
} + return false + } + + val daysWithoutSuccessWindowStart = Instant.ofEpochSecond(lastSuccessfulJob?.createdAtInSecond ?: firstJob.createdAtInSecond) + val numConsecutiveFailedJobs = jobService.countFailedJobsSinceLastSuccessForScope(connectionId.value.toString()) + val daysWithoutSuccess = getDaysBetweenTimestamps(daysWithoutSuccessWindowStart, timestamp) + + if (shouldDisableConnection(numConsecutiveFailedJobs, daysWithoutSuccess)) { + connectionService.disableConnections( + setOf(connectionId), + ConnectionAutoDisabledReason.TOO_MANY_FAILED_JOBS_WITH_NO_RECENT_SUCCESS, + ) + jobNotifier.autoDisableConnection(mostRecentJob, getAttemptStatsForJob(mostRecentJob)) + return true + } + + if (priorFailedJob != null && + shouldWarnAboutConnection(priorFailedJob, numConsecutiveFailedJobs, daysWithoutSuccess, daysWithoutSuccessWindowStart) + ) { + warnAboutConnection(mostRecentJob) + } + return false + } + + private fun shouldDisableConnection( + numConsecutiveFailedJobs: Int, + daysWithoutSuccess: Int, + ) = numConsecutiveFailedJobs >= maxFailedJobsInARowBeforeConnectionDisable && + daysWithoutSuccess >= maxDaysOfOnlyFailedJobsBeforeConnectionDisable + + private fun shouldWarnAboutConnection( + priorFailedJob: Job, + numConsecutiveFailedJobs: Int, + daysWithoutSuccess: Int, + daysWithoutSuccessWindowStart: Instant, + ): Boolean { + val priorDaysWithoutSuccess = getDaysBetweenTimestamps(daysWithoutSuccessWindowStart, Instant.ofEpochSecond(priorFailedJob.createdAtInSecond)) + val wasPriorWarningSent = + priorDaysWithoutSuccess >= maxDaysOfOnlyFailedJobsBeforeConnectionWarning && + numConsecutiveFailedJobs - 1 >= maxFailedJobsInARowBeforeConnectionWarning + + return !wasPriorWarningSent && + daysWithoutSuccess >= maxDaysOfOnlyFailedJobsBeforeConnectionWarning && + numConsecutiveFailedJobs >= maxFailedJobsInARowBeforeConnectionWarning + } + + private fun warnAboutConnection(mostRecentJob: Job) { + jobNotifier.autoDisableConnectionWarning(mostRecentJob, getAttemptStatsForJob(mostRecentJob)) + } + + private fun getAttemptStatsForJob(job: Job): List = + job.attempts.map { jobPersistence.getAttemptStats(job.id, it.attemptNumber) } + + private fun getDaysBetweenTimestamps( + firstInstant: Instant, + secondInstant: Instant, + ): Int = Duration.between(firstInstant, secondInstant).toDays().toInt() +} diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/services/OrganizationService.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/services/OrganizationService.kt new file mode 100644 index 00000000000..213b242e853 --- /dev/null +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/services/OrganizationService.kt @@ -0,0 +1,99 @@ +package io.airbyte.commons.server.services + +import io.airbyte.api.problems.ResourceType +import io.airbyte.api.problems.model.generated.ProblemMessageData +import io.airbyte.api.problems.model.generated.ProblemResourceData +import io.airbyte.api.problems.throwable.generated.ResourceNotFoundProblem +import io.airbyte.api.problems.throwable.generated.StateConflictProblem +import io.airbyte.commons.server.ConnectionId +import io.airbyte.commons.server.OrganizationId +import io.airbyte.config.OrganizationPaymentConfig.PaymentStatus +import io.airbyte.data.services.shared.ConnectionAutoDisabledReason +import io.micronaut.transaction.annotation.Transactional +import jakarta.inject.Singleton +import io.airbyte.data.services.ConnectionService as ConnectionRepository +import 
io.airbyte.data.services.OrganizationPaymentConfigService as OrganizationPaymentConfigRepository + +/** + * Application service for performing business logic related to organizations. + */ +interface OrganizationService { + /** + * Disable all connections in an organization. + * + * @param organizationId the ID of the organization to disable connections for + * @param autoDisableReason if set, the reason the connections were disabled through an automatic process + * @return the set of connection IDs that were disabled + */ + fun disableAllConnections( + organizationId: OrganizationId, + autoDisableReason: ConnectionAutoDisabledReason?, + ): Set + + /** + * Handle the end of a payment grace period for an organization. + * + * @param organizationId the ID of the organization that reached the end of a grace period. + */ + fun handlePaymentGracePeriodEnded(organizationId: OrganizationId) + + /** + * Handle an uncollectible invoice for an organization. + * + * @param organizationId the ID of the organization with the uncollectible invoice. + */ + fun handleUncollectibleInvoice(organizationId: OrganizationId) +} + +@Singleton +open class OrganizationServiceImpl( + private val connectionService: ConnectionService, + private val connectionRepository: ConnectionRepository, + private val organizationPaymentConfigRepository: OrganizationPaymentConfigRepository, +) : OrganizationService { + @Transactional("config") + override fun disableAllConnections( + organizationId: OrganizationId, + autoDisabledReason: ConnectionAutoDisabledReason?, + ): Set { + val connectionIds = connectionRepository.listConnectionIdsForOrganization(organizationId.value).map(::ConnectionId).toSet() + return connectionService.disableConnections(connectionIds, autoDisabledReason) + } + + @Transactional("config") + override fun handlePaymentGracePeriodEnded(organizationId: OrganizationId) { + val orgPaymentConfig = + organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) + ?: throw ResourceNotFoundProblem( + ProblemResourceData().resourceId(organizationId.toString()).resourceType(ResourceType.ORGANIZATION_PAYMENT_CONFIG), + ) + + if (orgPaymentConfig.paymentStatus != PaymentStatus.GRACE_PERIOD) { + throw StateConflictProblem( + ProblemMessageData().message( + "OrganizationPaymentConfig paymentStatus is ${orgPaymentConfig.paymentStatus}, but expected ${PaymentStatus.GRACE_PERIOD}", + ), + ) + } + + orgPaymentConfig.paymentStatus = PaymentStatus.DISABLED + organizationPaymentConfigRepository.savePaymentConfig(orgPaymentConfig) + + disableAllConnections(organizationId, ConnectionAutoDisabledReason.INVALID_PAYMENT_METHOD) + // TODO send an email summarizing the disabled connections and payment method problem + } + + override fun handleUncollectibleInvoice(organizationId: OrganizationId) { + val orgPaymentConfig = + organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) + ?: throw ResourceNotFoundProblem( + ProblemResourceData().resourceId(organizationId.toString()).resourceType(ResourceType.ORGANIZATION_PAYMENT_CONFIG), + ) + + orgPaymentConfig.paymentStatus = PaymentStatus.LOCKED + organizationPaymentConfigRepository.savePaymentConfig(orgPaymentConfig) + + disableAllConnections(organizationId, ConnectionAutoDisabledReason.INVOICE_MARKED_UNCOLLECTIBLE) + // TODO send an email summarizing the disabled connections and uncollectible invoice problem + } +} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogConverterTest.java 
b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogConverterTest.java index 963718c8a0c..517effc9036 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogConverterTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogConverterTest.java @@ -33,6 +33,7 @@ import io.micronaut.test.extensions.junit5.annotation.MicronautTest; import jakarta.inject.Inject; import java.util.List; +import java.util.UUID; import org.junit.jupiter.api.Test; @MicronautTest @@ -62,23 +63,29 @@ void testEnumConversion() { @Test void testConvertInternal() throws JsonValidationException { final HashingMapperConfig hashingMapper = MapperHelperKt.createHashingMapper(SECOND_FIELD_NAME); + final HashingMapperConfig hashingMapper2 = MapperHelperKt.createHashingMapper(FIELD_NAME, UUID.randomUUID()); final var apiCatalog = ConnectionHelpers.generateApiCatalogWithTwoFields(); final var apiStream = apiCatalog.getStreams().getFirst(); apiStream.getConfig().setMappers( List.of(new ConfiguredStreamMapper() .type(StreamMapperType.HASHING) - .mapperConfiguration(Jsons.jsonNode(hashingMapper.getConfig())))); + .mapperConfiguration(Jsons.jsonNode(hashingMapper.getConfig())), + new ConfiguredStreamMapper() + .id(hashingMapper2.id()) + .type(StreamMapperType.HASHING) + .mapperConfiguration(Jsons.jsonNode(hashingMapper2.getConfig())))); final var internalCatalog = catalogConverter.toConfiguredInternal(apiCatalog); assertEquals(1, internalCatalog.getStreams().size()); final var internalStream = internalCatalog.getStreams().getFirst(); final var mappers = internalStream.getMappers(); - assertEquals(1, mappers.size()); + assertEquals(2, mappers.size()); final var fields = internalStream.getFields(); assertEquals(2, fields.size()); assertEquals(hashingMapper, mappers.getFirst()); + assertEquals(hashingMapper2, mappers.get(1)); } @Test diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogDiffConvertersTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogDiffConvertersTest.java index 6c23cf79873..75e599494e6 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogDiffConvertersTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/CatalogDiffConvertersTest.java @@ -9,8 +9,8 @@ import io.airbyte.api.model.generated.FieldTransform; import io.airbyte.api.model.generated.StreamTransform; import io.airbyte.commons.enums.Enums; -import io.airbyte.commons.protocol.transform_models.FieldTransformType; -import io.airbyte.commons.protocol.transform_models.StreamTransformType; +import io.airbyte.commons.protocol.transformmodels.FieldTransformType; +import io.airbyte.commons.protocol.transformmodels.StreamTransformType; import org.junit.jupiter.api.Test; class CatalogDiffConvertersTest { diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/JobConverterTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/JobConverterTest.java index 3e1d6da9279..af4e3a86fb6 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/JobConverterTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/converters/JobConverterTest.java @@ -64,13 +64,6 @@ import io.airbyte.config.StreamSyncStats; import io.airbyte.config.SyncMode; import io.airbyte.config.SyncStats; -import 
io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.featureflag.StructuredLogs; -import io.airbyte.featureflag.TestClient; -import io.airbyte.featureflag.Workspace; -import io.airbyte.persistence.job.WorkspaceHelper; -import io.airbyte.validation.json.JsonValidationException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; @@ -88,11 +81,9 @@ class JobConverterTest { - private FeatureFlagClient featureFlagClient; private JobConverter jobConverter; private LogClientManager logClientManager; private LogUtils logUtils; - private WorkspaceHelper workspaceHelper; private static final long CREATED_AT = System.currentTimeMillis() / 1000; private static final Path LOG_PATH = Path.of("log_path"); private static final String FAILURE_EXTERNAL_MESSAGE = "something went wrong"; @@ -255,11 +246,9 @@ class TestJob { @BeforeEach public void setUp() { - featureFlagClient = mock(TestClient.class); logClientManager = mock(LogClientManager.class); logUtils = mock(LogUtils.class); - workspaceHelper = mock(WorkspaceHelper.class); - jobConverter = new JobConverter(featureFlagClient, logClientManager, logUtils, workspaceHelper); + jobConverter = new JobConverter(logClientManager, logUtils); job = mock(Job.class); final Attempt attempt = mock(Attempt.class); when(job.getId()).thenReturn(JOB_ID); @@ -282,6 +271,7 @@ public void setUp() { @Test void testGetJobInfoRead() { + when(logClientManager.getLogs(any())).thenReturn(new LogEvents(List.of(), "1")); assertEquals(JOB_INFO_UNSTRUCTURED_LOGS, jobConverter.getJobInfoRead(job)); } @@ -303,13 +293,10 @@ void testGetJobWithAttemptsRead() { } @Test - void testGetJobWithAttemptsReadStructuredLogs() throws JsonValidationException, ConfigNotFoundException { + void testGetJobWithAttemptsReadStructuredLogs() { final String logEventVersion = "1"; - final UUID workspaceId = UUID.randomUUID(); - when(featureFlagClient.boolVariation(StructuredLogs.INSTANCE, new Workspace(workspaceId))).thenReturn(true); when(logClientManager.getLogs(any())).thenReturn( new LogEvents(List.of(new LogEvent(System.currentTimeMillis(), "message", "INFO", LogSource.PLATFORM, null, null)), logEventVersion)); - when(workspaceHelper.getWorkspaceForJobId(any())).thenReturn(workspaceId); final JobInfoRead jobInfoRead = jobConverter.getJobInfoRead(job); assertEquals(LogFormatType.STRUCTURED, jobInfoRead.getAttempts().getFirst().getLogType()); assertEquals(logEventVersion, jobInfoRead.getAttempts().getFirst().getLogs().getVersion()); @@ -390,9 +377,8 @@ class TestSynchronousJob { @BeforeEach public void setUp() { - featureFlagClient = mock(TestClient.class); - workspaceHelper = mock(WorkspaceHelper.class); - jobConverter = new JobConverter(featureFlagClient, mock(LogClientManager.class), mock(LogUtils.class), workspaceHelper); + logClientManager = mock(LogClientManager.class); + jobConverter = new JobConverter(logClientManager, logUtils); metadata = mock(SynchronousJobMetadata.class); when(metadata.getId()).thenReturn(JOB_ID); when(metadata.getConfigType()).thenReturn(CONFIG_TYPE); @@ -407,6 +393,7 @@ public void setUp() { @Test void testSynchronousJobRead() { + when(logClientManager.getLogs(any())).thenReturn(new LogEvents(List.of(), "1")); assertEquals(SYNCHRONOUS_JOB_INFO_UNSTRUCTURED_LOGS, jobConverter.getSynchronousJobRead(metadata)); } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionSchedulerHelperTest.java 
b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionSchedulerHelperTest.java index f387e842d85..67c3b8147eb 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionSchedulerHelperTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionSchedulerHelperTest.java @@ -9,6 +9,9 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import io.airbyte.api.model.generated.ConnectionScheduleData; import io.airbyte.api.model.generated.ConnectionScheduleDataBasicSchedule; @@ -22,25 +25,45 @@ import io.airbyte.commons.server.converters.ApiPojoConverters; import io.airbyte.commons.server.handlers.helpers.CatalogConverter; import io.airbyte.commons.server.handlers.helpers.ConnectionScheduleHelper; +import io.airbyte.commons.server.helpers.CronExpressionHelper; import io.airbyte.config.BasicSchedule.TimeUnit; import io.airbyte.config.Schedule; import io.airbyte.config.StandardSync; import io.airbyte.config.StandardSync.ScheduleType; import io.airbyte.config.helpers.FieldGenerator; +import io.airbyte.data.exceptions.ConfigNotFoundException; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.TestClient; +import io.airbyte.persistence.job.WorkspaceHelper; import io.airbyte.validation.json.JsonValidationException; import java.util.Collections; +import java.util.UUID; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; class ConnectionSchedulerHelperTest { - private final ConnectionScheduleHelper connectionScheduleHelper = - new ConnectionScheduleHelper(new ApiPojoConverters(new CatalogConverter(new FieldGenerator(), Collections.emptyList()))); + private ConnectionScheduleHelper connectionScheduleHelper; + private final ApiPojoConverters apiPojoConverters = new ApiPojoConverters(new CatalogConverter(new FieldGenerator(), Collections.emptyList())); + private final CronExpressionHelper cronExpressionHelper = new CronExpressionHelper(); private static final String EXPECTED_CRON_TIMEZONE = "UTC"; - private static final String EXPECTED_CRON_EXPRESSION = "* */2 * * * ?"; + private static final String EXPECTED_CRON_EXPRESSION = "0 0 12 * * ?"; + private static final UUID WORKSPACE_ID = UUID.randomUUID(); + private static final UUID ORGANIZATION_ID = UUID.randomUUID(); + + @BeforeEach + void setup() throws JsonValidationException, ConfigNotFoundException { + final WorkspaceHelper workspaceHelper = mock(WorkspaceHelper.class); + when(workspaceHelper.getWorkspaceForSourceId(any())).thenReturn(WORKSPACE_ID); + when(workspaceHelper.getOrganizationForWorkspace(WORKSPACE_ID)).thenReturn(ORGANIZATION_ID); + final FeatureFlagClient featureFlagClient = mock(TestClient.class); + connectionScheduleHelper = + new ConnectionScheduleHelper(apiPojoConverters, cronExpressionHelper, featureFlagClient, workspaceHelper); + } @Test - void testPopulateSyncScheduleFromManualType() throws JsonValidationException { + void testPopulateSyncScheduleFromManualType() throws JsonValidationException, ConfigNotFoundException { final StandardSync actual = new StandardSync(); connectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, ConnectionScheduleType.MANUAL, null); @@ -51,7 +74,7 @@ void 
testPopulateSyncScheduleFromManualType() throws JsonValidationException { } @Test - void testPopulateSyncScheduleFromBasicType() throws JsonValidationException { + void testPopulateSyncScheduleFromBasicType() throws JsonValidationException, ConfigNotFoundException { final StandardSync actual = new StandardSync(); connectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, ConnectionScheduleType.BASIC, new ConnectionScheduleData() @@ -68,7 +91,7 @@ ConnectionScheduleType.BASIC, new ConnectionScheduleData() } @Test - void testPopulateSyncScheduleFromCron() throws JsonValidationException { + void testPopulateSyncScheduleFromCron() throws JsonValidationException, ConfigNotFoundException { final StandardSync actual = new StandardSync(); connectionScheduleHelper.populateSyncFromScheduleTypeAndData(actual, ConnectionScheduleType.CRON, new ConnectionScheduleData() @@ -102,7 +125,7 @@ void testScheduleValidation() { } @Test - void testAvailableCronTimeZonesStayTheSame() { + void testAvailableCronTimeZonesStayTheSame() throws ConfigNotFoundException { /* * NOTE: this test exists to make sure that the server stays in sync with the frontend. The list of * supported timezones is copied from diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java index ca8f75a7b2b..dfe291a3924 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java @@ -6,8 +6,6 @@ import static io.airbyte.commons.server.helpers.ConnectionHelpers.FIELD_NAME; import static io.airbyte.commons.server.helpers.ConnectionHelpers.SECOND_FIELD_NAME; -import static io.airbyte.config.EnvConfigs.DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE; -import static io.airbyte.config.EnvConfigs.DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE; import static io.airbyte.config.Job.REPLICATION_TYPES; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; @@ -15,12 +13,10 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -61,7 +57,6 @@ import io.airbyte.api.model.generated.DestinationSyncMode; import io.airbyte.api.model.generated.FieldAdd; import io.airbyte.api.model.generated.FieldTransform; -import io.airbyte.api.model.generated.InternalOperationResult; import io.airbyte.api.model.generated.JobAggregatedStats; import io.airbyte.api.model.generated.JobConfigType; import io.airbyte.api.model.generated.JobRead; @@ -95,9 +90,11 @@ import io.airbyte.commons.server.handlers.helpers.CatalogConverter; import io.airbyte.commons.server.handlers.helpers.ConnectionScheduleHelper; import io.airbyte.commons.server.handlers.helpers.ConnectionTimelineEventHelper; +import io.airbyte.commons.server.handlers.helpers.MapperSecretHelper; import io.airbyte.commons.server.handlers.helpers.NotificationHelper; 
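// ------------------------------------------------------------------------------------------
// Illustrative sketch (editor's example, not part of the diff): the CatalogConverterTest and
// ConnectionsHandlerTest hunks in this change attach stream mappers that now carry an explicit
// mapper id. The fragment below mirrors that setup; every class and call named here appears in
// the hunks themselves, and the statics it relies on (FIELD_NAME, ConnectionHelpers, Jsons) are
// assumed to be the ones already imported by these test files.
final UUID exampleMapperId = UUID.randomUUID();
final HashingMapperConfig exampleHashingMapper = MapperHelperKt.createHashingMapper(FIELD_NAME, exampleMapperId);
final AirbyteCatalog exampleCatalog = ConnectionHelpers.generateBasicApiCatalog();
exampleCatalog.getStreams().getFirst().getConfig().mappers(List.of(
    new ConfiguredStreamMapper()
        .id(exampleHashingMapper.id()) // the mapper id round-trips through the API model
        .type(StreamMapperType.HASHING)
        .mapperConfiguration(Jsons.jsonNode(exampleHashingMapper.getConfig()))));
// ------------------------------------------------------------------------------------------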
import io.airbyte.commons.server.handlers.helpers.StatsAggregationHelper; import io.airbyte.commons.server.helpers.ConnectionHelpers; +import io.airbyte.commons.server.helpers.CronExpressionHelper; import io.airbyte.commons.server.scheduler.EventRunner; import io.airbyte.commons.server.validation.CatalogValidator; import io.airbyte.commons.server.validation.ValidationError; @@ -126,7 +123,6 @@ import io.airbyte.config.JobResetConnectionConfig; import io.airbyte.config.JobStatus; import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.JobWithStatusAndTimestamp; import io.airbyte.config.MapperConfig; import io.airbyte.config.MapperOperationName; import io.airbyte.config.NotificationSettings; @@ -179,7 +175,6 @@ import io.airbyte.mappers.transformations.DestinationCatalogGenerator.MapperError; import io.airbyte.mappers.transformations.DestinationCatalogGenerator.MapperErrorType; import io.airbyte.mappers.transformations.HashingMapper; -import io.airbyte.persistence.job.JobNotifier; import io.airbyte.persistence.job.JobPersistence; import io.airbyte.persistence.job.WorkspaceHelper; import io.airbyte.persistence.job.factory.OAuthConfigSupplier; @@ -218,16 +213,6 @@ @SuppressWarnings("PMD.AvoidDuplicateLiterals") class ConnectionsHandlerTest { - private static final Instant CURRENT_INSTANT = Instant.now(); - private static final JobWithStatusAndTimestamp FAILED_JOB = - new JobWithStatusAndTimestamp(1, JobStatus.FAILED, CURRENT_INSTANT.getEpochSecond(), CURRENT_INSTANT.getEpochSecond()); - private static final JobWithStatusAndTimestamp SUCCEEDED_JOB = - new JobWithStatusAndTimestamp(1, JobStatus.SUCCEEDED, CURRENT_INSTANT.getEpochSecond(), CURRENT_INSTANT.getEpochSecond()); - private static final JobWithStatusAndTimestamp CANCELLED_JOB = - new JobWithStatusAndTimestamp(1, JobStatus.CANCELLED, CURRENT_INSTANT.getEpochSecond(), CURRENT_INSTANT.getEpochSecond()); - private static final int MAX_FAILURE_JOBS_IN_A_ROW = DEFAULT_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE; - private static final int MAX_DAYS_OF_ONLY_FAILED_JOBS = DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE; - private static final int MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_WARNING = DEFAULT_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE / 2; private static final String PRESTO_TO_HUDI = "presto to hudi"; private static final String PRESTO_TO_HUDI_PREFIX = "presto_to_hudi"; private static final String SOURCE_TEST = "source-test"; @@ -242,7 +227,7 @@ class ConnectionsHandlerTest { private static final String AZKABAN_USERS = "azkaban_users"; private static final String CRON_TIMEZONE_UTC = "UTC"; private static final String TIMEZONE_LOS_ANGELES = "America/Los_Angeles"; - private static final String CRON_EXPRESSION = "* */2 * * * ?"; + private static final String CRON_EXPRESSION = "0 0 */2 * * ?"; private static final String STREAM_SELECTION_DATA = "null/users-data0"; private JobPersistence jobPersistence; private Supplier uuidGenerator; @@ -282,11 +267,11 @@ class ConnectionsHandlerTest { private WorkspaceService workspaceService; private SecretPersistenceConfigService secretPersistenceConfigService; private ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; + private MapperSecretHelper mapperSecretHelper; private DestinationHandler destinationHandler; private SourceHandler sourceHandler; private StreamRefreshesHandler streamRefreshesHandler; - private JobNotifier jobNotifier; private Job job; private StreamGenerationRepository streamGenerationRepository; private CatalogGenerationSetter 
catalogGenerationSetter; @@ -299,10 +284,11 @@ class ConnectionsHandlerTest { private CatalogService catalogService; private ConnectionService connectionService; private DestinationCatalogGenerator destinationCatalogGenerator; + private ConnectionScheduleHelper connectionSchedulerHelper; private final CatalogConverter catalogConverter = new CatalogConverter(new FieldGenerator(), Collections.singletonList(new HashingMapper())); private final ApplySchemaChangeHelper applySchemaChangeHelper = new ApplySchemaChangeHelper(catalogConverter); private final ApiPojoConverters apiPojoConverters = new ApiPojoConverters(catalogConverter); - private final ConnectionScheduleHelper connectionSchedulerHelper = new ConnectionScheduleHelper(apiPojoConverters); + private final CronExpressionHelper cronExpressionHelper = new CronExpressionHelper(); @SuppressWarnings("unchecked") @BeforeEach @@ -414,6 +400,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio connectionTimelineEventService = mock(ConnectionTimelineEventService.class); connectionTimelineEventHelper = mock(ConnectionTimelineEventHelper.class); statePersistence = mock(StatePersistence.class); + mapperSecretHelper = mock(MapperSecretHelper.class); featureFlagClient = mock(TestClient.class); @@ -449,9 +436,9 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio catalogConverter, apiPojoConverters); + connectionSchedulerHelper = new ConnectionScheduleHelper(apiPojoConverters, cronExpressionHelper, featureFlagClient, workspaceHelper); matchSearchHandler = new MatchSearchHandler(destinationHandler, sourceHandler, sourceService, destinationService, connectionService, apiPojoConverters); - jobNotifier = mock(JobNotifier.class); featureFlagClient = mock(TestClient.class); job = mock(Job.class); streamGenerationRepository = mock(StreamGenerationRepository.class); @@ -466,6 +453,10 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio destinationCatalogGenerator = mock(DestinationCatalogGenerator.class); when(destinationCatalogGenerator.generateDestinationCatalog(any())) .thenReturn(new CatalogGenerationResult(new ConfiguredAirbyteCatalog(), Map.of())); + + when(mapperSecretHelper.maskMapperSecrets(any())).thenAnswer(invocation -> invocation.getArgument(0)); + when(mapperSecretHelper.createAndReplaceMapperSecrets(any(), any())).thenAnswer(invocation -> invocation.getArgument(1)); + when(mapperSecretHelper.updateAndReplaceMapperSecrets(any(), any(), any())).thenAnswer(invocation -> invocation.getArgument(2)); } @Nested @@ -485,9 +476,6 @@ void setUp() throws JsonValidationException, ConfigNotFoundException, IOExceptio featureFlagClient, actorDefinitionVersionHelper, connectorDefinitionSpecificationHandler, - jobNotifier, - MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW, streamGenerationRepository, catalogGenerationSetter, catalogValidator, @@ -504,7 +492,8 @@ void setUp() throws JsonValidationException, ConfigNotFoundException, IOExceptio catalogConverter, applySchemaChangeHelper, apiPojoConverters, - connectionSchedulerHelper); + connectionSchedulerHelper, + mapperSecretHelper); when(uuidGenerator.get()).thenReturn(standardSync.getConnectionId()); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() @@ -910,203 +899,6 @@ void testEnumConversion() { assertTrue(Enums.isCompatible(NamespaceDefinitionType.class, io.airbyte.config.JobSyncConfig.NamespaceDefinitionType.class)); } - @Nested - class AutoDisableConnection { - - 
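// ------------------------------------------------------------------------------------------
// Illustrative, self-contained sketch (editor's example, not part of the diff): the setUp()
// hunk earlier in this file stubs the new MapperSecretHelper as a pass-through with
// thenAnswer(invocation -> invocation.getArgument(n)), so the handler under test sees its
// catalog argument unchanged. The same "identity stub" pattern in isolation, using a
// hypothetical Masker interface rather than any Airbyte type:
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

interface Masker {

  String mask(String input);

}

class IdentityStubExample {

  public static void main(final String[] args) {
    final Masker masker = mock(Masker.class);
    // echo the first argument back instead of masking it
    when(masker.mask(any())).thenAnswer(invocation -> invocation.getArgument(0));
    System.out.println(masker.mask("secret")); // prints "secret"
  }

}
// ------------------------------------------------------------------------------------------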
@SuppressWarnings("LineLength") - @Test - @DisplayName("Test that the connection is __not__ disabled and warning is sent for connections that have failed `MAX_FAILURE_JOBS_IN_A_ROW / 2` times") - void testWarningNotificationsForAutoDisablingMaxNumFailures() throws IOException, JsonValidationException, ConfigNotFoundException { - - // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW/2 and 1 success - final List jobs = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW / 2, FAILED_JOB)); - jobs.add(SUCCEEDED_JOB); - - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(jobs); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertFalse(internalOperationResult.getSucceeded()); - verify(connectionService, Mockito.never()).writeStandardSync(any()); - verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, times(1)).autoDisableConnectionWarning(any(), any()); - } - - @SuppressWarnings("LineLength") - @Test - @DisplayName("Test that the connection is __not__ disabled and warning is sent after only failed jobs in last `MAX_DAYS_OF_STRAIGHT_FAILURE / 2` days") - void testWarningNotificationsForAutoDisablingMaxDaysOfFailure() throws IOException, JsonValidationException, ConfigNotFoundException { - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))) - .thenReturn(Collections.singletonList(FAILED_JOB)); - - when(job.getCreatedAtInSecond()).thenReturn( - CURRENT_INSTANT.getEpochSecond() - java.util.concurrent.TimeUnit.DAYS.toSeconds(MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_WARNING)); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertFalse(internalOperationResult.getSucceeded()); - verify(connectionService, Mockito.never()).writeStandardSync(any()); - verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, times(1)).autoDisableConnectionWarning(any(), any()); - } - - @Test - @DisplayName("Test that the connection is __not__ disabled and no warning is sent after one was just sent for failing multiple days") - void testWarningNotificationsDoesNotSpam() throws IOException, JsonValidationException, ConfigNotFoundException { - final List jobs = new ArrayList<>(Collections.nCopies(2, FAILED_JOB)); - final long jobCreateOrUpdatedInSeconds = - CURRENT_INSTANT.getEpochSecond() - java.util.concurrent.TimeUnit.DAYS.toSeconds(MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_WARNING); - - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(jobs); - - when(job.getCreatedAtInSecond()).thenReturn(jobCreateOrUpdatedInSeconds); - when(job.getUpdatedAtInSecond()).thenReturn(jobCreateOrUpdatedInSeconds); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertFalse(internalOperationResult.getSucceeded()); - verify(connectionService, Mockito.never()).writeStandardSync(any()); - verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, 
Mockito.never()).autoDisableConnectionWarning(any(), any()); - } - - @Test - @DisplayName("Test that the connection is __not__ disabled and no warning is sent after one was just sent for consecutive failures") - void testWarningNotificationsDoesNotSpamAfterConsecutiveFailures() throws IOException, JsonValidationException, ConfigNotFoundException { - final List jobs = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, FAILED_JOB)); - final long jobCreateOrUpdatedInSeconds = - CURRENT_INSTANT.getEpochSecond() - java.util.concurrent.TimeUnit.DAYS.toSeconds(MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_WARNING); - - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(jobs); - - when(job.getCreatedAtInSecond()).thenReturn(jobCreateOrUpdatedInSeconds); - when(job.getUpdatedAtInSecond()).thenReturn(jobCreateOrUpdatedInSeconds); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertFalse(internalOperationResult.getSucceeded()); - verify(connectionService, Mockito.never()).writeStandardSync(any()); - verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); - } - - @SuppressWarnings("LineLength") - @Test - @DisplayName("Test that the connection is _not_ disabled and no warning is sent after only failed jobs and oldest job is less than `MAX_DAYS_OF_STRAIGHT_FAILURE / 2 `days old") - void testOnlyFailuresButFirstJobYoungerThanMaxDaysWarning() throws IOException, JsonValidationException, ConfigNotFoundException { - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))) - .thenReturn(Collections.singletonList(FAILED_JOB)); - - when(job.getCreatedAtInSecond()).thenReturn(CURRENT_INSTANT.getEpochSecond()); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertFalse(internalOperationResult.getSucceeded()); - verify(connectionService, Mockito.never()).writeStandardSync(any()); - verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); - } - - // test should disable / shouldn't disable cases - - @Test - @DisplayName("Test that the connection is disabled after MAX_FAILURE_JOBS_IN_A_ROW straight failures") - void testMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException { - // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW and 1 success - final List jobs = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW, FAILED_JOB)); - jobs.add(SUCCEEDED_JOB); - - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(jobs); - when(connectionService.getStandardSync(connectionId)).thenReturn(standardSync); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertTrue(internalOperationResult.getSucceeded()); - verifyDisabled(); - } - - @Test - @DisplayName("Test that the connection is _not_ disabled after MAX_FAILURE_JOBS_IN_A_ROW - 1 straight failures") 
- void testLessThanMaxFailuresInARow() throws IOException, JsonValidationException, ConfigNotFoundException { - // from most recent to least recent: MAX_FAILURE_JOBS_IN_A_ROW-1 and 1 success - final List jobs = new ArrayList<>(Collections.nCopies(MAX_FAILURE_JOBS_IN_A_ROW - 1, FAILED_JOB)); - jobs.add(SUCCEEDED_JOB); - - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(jobs); - when(job.getCreatedAtInSecond()).thenReturn( - CURRENT_INSTANT.getEpochSecond() - java.util.concurrent.TimeUnit.DAYS.toSeconds(MAX_DAYS_OF_ONLY_FAILED_JOBS)); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertFalse(internalOperationResult.getSucceeded()); - verify(connectionService, Mockito.never()).writeStandardSync(any()); - verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); - - } - - @Test - @DisplayName("Test that the connection is _not_ disabled after 0 jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") - void testNoRuns() throws IOException, JsonValidationException, ConfigNotFoundException { - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))).thenReturn(Collections.emptyList()); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertFalse(internalOperationResult.getSucceeded()); - verify(connectionService, Mockito.never()).writeStandardSync(any()); - verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); - } - - @Test - @DisplayName("Test that the connection is disabled after only failed jobs in last MAX_DAYS_OF_STRAIGHT_FAILURE days") - void testOnlyFailuresInMaxDays() throws IOException, JsonValidationException, ConfigNotFoundException { - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))) - .thenReturn(Collections.singletonList(FAILED_JOB)); - - when(job.getCreatedAtInSecond()).thenReturn( - CURRENT_INSTANT.getEpochSecond() - java.util.concurrent.TimeUnit.DAYS.toSeconds(MAX_DAYS_OF_ONLY_FAILED_JOBS)); - when(connectionService.getStandardSync(connectionId)).thenReturn(standardSync); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertTrue(internalOperationResult.getSucceeded()); - verifyDisabled(); - } - - @Test - @DisplayName("Test that the connection is _not_ disabled after only cancelled jobs") - void testIgnoreOnlyCancelledRuns() throws IOException, JsonValidationException, ConfigNotFoundException { - when(jobPersistence.listJobStatusAndTimestampWithConnection(connectionId, REPLICATION_TYPES, - CURRENT_INSTANT.minus(MAX_DAYS_OF_ONLY_FAILED_JOBS, ChronoUnit.DAYS))) - .thenReturn(Collections.singletonList(CANCELLED_JOB)); - - final InternalOperationResult internalOperationResult = connectionsHandler.autoDisableConnection(connectionId, CURRENT_INSTANT); - - assertFalse(internalOperationResult.getSucceeded()); - verify(connectionService, Mockito.never()).writeStandardSync(any()); - 
verify(jobNotifier, Mockito.never()).autoDisableConnection(any(), any()); - } - - private void verifyDisabled() throws IOException { - verify(connectionService, times(1)).writeStandardSync( - argThat(standardSync -> (standardSync.getStatus().equals(Status.INACTIVE) && standardSync.getConnectionId().equals(connectionId)))); - verify(connectionService, times(1)).writeStandardSync(standardSync); - verify(jobNotifier, times(1)).autoDisableConnection(eq(job), any()); - verify(jobNotifier, Mockito.never()).autoDisableConnectionWarning(any(), any()); - } - - } - @Nested class CreateConnection { @@ -1244,7 +1036,7 @@ void testCreateConnectionWithSelectedFields() } @Test - void testCreateConnectionWithMappers() + void testCreateConnectionWithHashedFields() throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { final StandardWorkspace workspace = new StandardWorkspace() .withWorkspaceId(workspaceId) @@ -1265,6 +1057,34 @@ void testCreateConnectionWithMappers() verify(connectionService).writeStandardSync(standardSync.withNotifySchemaChangesByEmail(null)); } + @Test + void testCreateConnectionWithMappers() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { + final StandardWorkspace workspace = new StandardWorkspace() + .withWorkspaceId(workspaceId) + .withDefaultGeography(Geography.EU); + when(workspaceService.getStandardWorkspaceNoSecrets(workspaceId, true)).thenReturn(workspace); + + final UUID newMapperId = UUID.randomUUID(); + when(uuidGenerator.get()).thenReturn(connectionId, newMapperId); + final MapperConfig hashingMapper = MapperHelperKt.createHashingMapper(FIELD_NAME, newMapperId); + + final AirbyteCatalog catalog = ConnectionHelpers.generateBasicApiCatalog(); + catalog.getStreams().getFirst().getConfig().mappers(List.of(new ConfiguredStreamMapper() + .type(StreamMapperType.HASHING) + .mapperConfiguration(Jsons.jsonNode(hashingMapper.config())))); + + final ConnectionCreate connectionCreate = buildConnectionCreateRequest(standardSync, catalog); + + final ConnectionRead actualConnectionRead = connectionsHandler.createConnection(connectionCreate); + + final ConnectionRead expectedConnectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); + assertEquals(expectedConnectionRead, actualConnectionRead); + + standardSync.getCatalog().getStreams().getFirst().setMappers(List.of(hashingMapper)); + verify(connectionService).writeStandardSync(standardSync.withNotifySchemaChangesByEmail(null)); + } + @Test void testCreateConnectionValidatesMappers() throws JsonValidationException, ConfigNotFoundException, IOException { final StandardWorkspace workspace = new StandardWorkspace() @@ -1289,6 +1109,12 @@ public String name() { return MapperOperationName.HASHING; } + @Nullable + @Override + public UUID id() { + return null; + } + @Nullable @Override public String documentationUrl() { @@ -1591,6 +1417,8 @@ void testUpdateConnectionWithDuplicateStreamsShouldThrowException() { @Test void testUpdateConnectionPatchScheduleToCron() throws Exception { + when(workspaceHelper.getWorkspaceForSourceId(any())).thenReturn(UUID.randomUUID()); + when(workspaceHelper.getOrganizationForWorkspace(any())).thenReturn(UUID.randomUUID()); final ConnectionScheduleData cronScheduleData = new ConnectionScheduleData().cron( new ConnectionScheduleDataCron().cronExpression(CRON_EXPRESSION).cronTimeZone(CRON_TIMEZONE_UTC)); @@ -1739,6 +1567,7 @@ void 
testUpdateConnectionPatchValidatesMappers() throws Exception { .syncCatalog(catalogForUpdate); final String streamName = "stream-name"; + final UUID mapperId = UUID.randomUUID(); when(connectionService.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); when(destinationCatalogGenerator.generateDestinationCatalog(catalogConverter.toConfiguredInternal(catalogForUpdate))) .thenReturn(new CatalogGenerationResult(new ConfiguredAirbyteCatalog(), @@ -1758,6 +1587,11 @@ public String documentationUrl() { return null; } + @Override + public UUID id() { + return mapperId; + } + @NotNull @Override public String name() { @@ -1774,7 +1608,7 @@ public String name() { new ProblemMapperErrorData() .stream(streamName) .error(MapperErrorType.INVALID_MAPPER_CONFIG.name()) - .mapper(new ProblemMapperErrorDataMapper().type(MapperOperationName.HASHING).mapperConfiguration(Map.of()))); + .mapper(new ProblemMapperErrorDataMapper().id(mapperId).type(MapperOperationName.HASHING).mapperConfiguration(Map.of()))); } @Test @@ -1821,10 +1655,13 @@ void testUpdateConnectionPatchMappers() throws Exception { standardSync.setCatalog(ConnectionHelpers.generateAirbyteCatalogWithTwoFields()); // Send an update that hashes one of the fields, using mappers - final HashingMapperConfig hashingMapper = MapperHelperKt.createHashingMapper(FIELD_NAME); + final HashingMapperConfig hashingMapper = MapperHelperKt.createHashingMapper(FIELD_NAME, UUID.randomUUID()); final AirbyteCatalog catalogForUpdate = ConnectionHelpers.generateApiCatalogWithTwoFields(); catalogForUpdate.getStreams().getFirst().getConfig().addMappersItem( - new ConfiguredStreamMapper().type(StreamMapperType.HASHING).mapperConfiguration(Jsons.jsonNode(hashingMapper.getConfig()))); + new ConfiguredStreamMapper() + .id(hashingMapper.id()) + .type(StreamMapperType.HASHING) + .mapperConfiguration(Jsons.jsonNode(hashingMapper.getConfig()))); // Expect mapper in the persisted catalog final ConfiguredAirbyteCatalog expectedPersistedCatalog = ConnectionHelpers.generateAirbyteCatalogWithTwoFields(); @@ -2032,9 +1869,6 @@ void setUp() { featureFlagClient, actorDefinitionVersionHelper, connectorDefinitionSpecificationHandler, - jobNotifier, - MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW, streamGenerationRepository, catalogGenerationSetter, catalogValidator, @@ -2048,7 +1882,7 @@ void setUp() { connectionService, workspaceService, destinationCatalogGenerator, catalogConverter, applySchemaChangeHelper, - apiPojoConverters, connectionSchedulerHelper); + apiPojoConverters, connectionSchedulerHelper, mapperSecretHelper); } private Attempt generateMockAttemptWithStreamStats(final Instant attemptTime, final List, Long>> streamsToRecordsSynced) { @@ -2272,9 +2106,6 @@ void setUp() { featureFlagClient, actorDefinitionVersionHelper, connectorDefinitionSpecificationHandler, - jobNotifier, - MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW, streamGenerationRepository, catalogGenerationSetter, catalogValidator, @@ -2288,7 +2119,7 @@ void setUp() { connectionService, workspaceService, destinationCatalogGenerator, - catalogConverter, applySchemaChangeHelper, apiPojoConverters, connectionSchedulerHelper); + catalogConverter, applySchemaChangeHelper, apiPojoConverters, connectionSchedulerHelper, mapperSecretHelper); } @Test @@ -2693,120 +2524,6 @@ void testDiffDifferentDestinationSyncMode() { assertEquals(Set.of(new StreamDescriptor().name(STREAM1)), changedSd); } - @Test - void testDiffAddedFieldHash() { - final AirbyteStreamConfiguration 
streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - null); - - final AirbyteStreamConfiguration streamConfigurationWithHashedFields = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - List.of(new SelectedFieldInfo().fieldPath(List.of("field")))); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - null); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfigurationWithHashedFields), - getStreamAndConfig(STREAM2, streamConfiguration2))); - - final Set changedSd = connectionsHandler.getConfigurationDiff(catalog1, catalog2); - assertFalse(changedSd.isEmpty()); - assertEquals(1, changedSd.size()); - assertEquals(Set.of(new StreamDescriptor().name(STREAM1)), changedSd); - } - - @Test - void testDiffRemovingSecondFieldHash() { - final AirbyteStreamConfiguration streamConfiguration1 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - List.of( - new SelectedFieldInfo().fieldPath(List.of("field_1")), - new SelectedFieldInfo().fieldPath(List.of("field_2")))); - - final AirbyteStreamConfiguration streamConfigurationWithHashedFields = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - List.of(new SelectedFieldInfo().fieldPath(List.of("field_1")))); - - final AirbyteStreamConfiguration streamConfiguration2 = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - null); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfiguration1), - getStreamAndConfig(STREAM2, streamConfiguration2))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfigurationWithHashedFields), - getStreamAndConfig(STREAM2, streamConfiguration2))); - - final Set changedSd = connectionsHandler.getConfigurationDiff(catalog1, catalog2); - assertFalse(changedSd.isEmpty()); - assertEquals(1, changedSd.size()); - assertEquals(Set.of(new StreamDescriptor().name(STREAM1)), changedSd); - } - - @Test - void testNoDiffWhenFieldHashOrderDiffers() { - final AirbyteStreamConfiguration streamConfiguration = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - List.of( - new SelectedFieldInfo().fieldPath(List.of("field_1")), - new SelectedFieldInfo().fieldPath(List.of("field_2")))); - - final AirbyteStreamConfiguration streamConfigurationWithReorderedHashedFields = getStreamConfiguration( - List.of(CURSOR1), - List.of(List.of(PK1)), - SyncMode.INCREMENTAL, - DestinationSyncMode.APPEND_DEDUP, - List.of( - new SelectedFieldInfo().fieldPath(List.of("field_2")), - new SelectedFieldInfo().fieldPath(List.of("field_1")))); - - final AirbyteCatalog catalog1 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, 
streamConfiguration))); - final AirbyteCatalog catalog2 = new AirbyteCatalog() - .streams( - List.of( - getStreamAndConfig(STREAM1, streamConfigurationWithReorderedHashedFields))); - - assertTrue(connectionsHandler.getConfigurationDiff(catalog1, catalog2).isEmpty()); - } - @Test void testConnectionStatus() throws IOException, JsonValidationException, ConfigNotFoundException { when(connectionService.getStandardSync(standardSync.getConnectionId())).thenReturn(standardSync); @@ -3200,9 +2917,6 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio featureFlagClient, actorDefinitionVersionHelper, connectorDefinitionSpecificationHandler, - jobNotifier, - MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW, streamGenerationRepository, catalogGenerationSetter, catalogValidator, @@ -3217,7 +2931,7 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio workspaceService, destinationCatalogGenerator, catalogConverter, applySchemaChangeHelper, - apiPojoConverters, connectionSchedulerHelper); + apiPojoConverters, connectionSchedulerHelper, mapperSecretHelper); } @Test @@ -3464,9 +3178,6 @@ void setUp() { featureFlagClient, actorDefinitionVersionHelper, connectorDefinitionSpecificationHandler, - jobNotifier, - MAX_DAYS_OF_ONLY_FAILED_JOBS, - MAX_FAILURE_JOBS_IN_A_ROW, streamGenerationRepository, catalogGenerationSetter, catalogValidator, @@ -3483,7 +3194,8 @@ void setUp() { catalogConverter, applySchemaChangeHelper, apiPojoConverters, - connectionSchedulerHelper); + connectionSchedulerHelper, + mapperSecretHelper); } @Test diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java index d143b96c072..522d8f75959 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorBuilderProjectsHandlerTest.java @@ -27,6 +27,9 @@ import io.airbyte.api.model.generated.BaseActorDefinitionVersionInfo; import io.airbyte.api.model.generated.BuilderProjectForDefinitionRequestBody; import io.airbyte.api.model.generated.BuilderProjectForDefinitionResponse; +import io.airbyte.api.model.generated.BuilderProjectOauthConsentRequest; +import io.airbyte.api.model.generated.CompleteConnectorBuilderProjectOauthRequest; +import io.airbyte.api.model.generated.CompleteOAuthResponse; import io.airbyte.api.model.generated.ConnectorBuilderHttpRequest; import io.airbyte.api.model.generated.ConnectorBuilderHttpRequest.HttpMethodEnum; import io.airbyte.api.model.generated.ConnectorBuilderHttpResponse; @@ -46,6 +49,7 @@ import io.airbyte.api.model.generated.DeclarativeManifestRequestBody; import io.airbyte.api.model.generated.DeclarativeSourceManifest; import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.api.model.generated.OAuthConsentRead; import io.airbyte.api.model.generated.SourceDefinitionIdBody; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.json.Jsons; @@ -86,13 +90,18 @@ import io.airbyte.data.services.WorkspaceService; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; +import io.airbyte.oauth.OAuthImplementationFactory; +import io.airbyte.oauth.declarative.DeclarativeOAuthFlow; +import 
io.airbyte.protocol.models.AdvancedAuth; import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.protocol.models.OAuthConfigSpecification; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.net.URI; import java.time.OffsetDateTime; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.function.Supplier; @@ -163,6 +172,7 @@ class ConnectorBuilderProjectsHandlerTest { private ActorDefinitionService actorDefinitionService; private RemoteDefinitionsProvider remoteDefinitionsProvider; private ConnectorSpecification adaptedConnectorSpecification; + private OAuthImplementationFactory oauthImplementationFactory; private UUID workspaceId; private final String specString = """ @@ -219,6 +229,7 @@ void setUp() throws JsonProcessingException { actorDefinitionService = mock(ActorDefinitionService.class); remoteDefinitionsProvider = mock(RemoteDefinitionsProvider.class); adaptedConnectorSpecification = mock(ConnectorSpecification.class); + oauthImplementationFactory = mock(OAuthImplementationFactory.class); setupConnectorSpecificationAdapter(any(), ""); workspaceId = UUID.randomUUID(); @@ -227,7 +238,7 @@ void setUp() throws JsonProcessingException { manifestInjector, workspaceService, featureFlagClient, secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService, sourceService, secretsProcessor, - connectorBuilderServerApiClient, actorDefinitionService, remoteDefinitionsProvider); + connectorBuilderServerApiClient, actorDefinitionService, remoteDefinitionsProvider, oauthImplementationFactory); when(manifestInjector.getCdkVersion(any())).thenReturn(A_CDK_VERSION); when(declarativeManifestImageVersionService.getDeclarativeManifestImageVersionByMajorVersion(anyInt())) @@ -1001,4 +1012,78 @@ void testCreateForkedConnectorBuilderProject() throws JsonValidationException, C eq(baseActorDefinitionVersionId), eq(null), eq(null)); } + @Test + void testGetConnectorBuilderProjectOAuthConsent() throws Exception { + final UUID projectId = UUID.randomUUID(); + final UUID workspaceId = UUID.randomUUID(); + final String redirectUrl = "https://airbyte.com/auth_flow"; + final String consentUrl = "https://consent.url"; + + final OAuthConfigSpecification oAuthConfigSpecification = mock(OAuthConfigSpecification.class); + final ConnectorSpecification spec = + new ConnectorSpecification().withAdvancedAuth(new AdvancedAuth().withOauthConfigSpecification(oAuthConfigSpecification)); + final ConnectorBuilderProject project = + new ConnectorBuilderProject().withManifestDraft(Jsons.jsonNode(Map.of("spec", spec))).withTestingValues(testingValuesWithSecretCoordinates); + when(connectorBuilderService.getConnectorBuilderProject(projectId, true)).thenReturn(project); + when(secretsRepositoryReader.hydrateConfigFromDefaultSecretPersistence(testingValuesWithSecretCoordinates)).thenReturn(testingValues); + + final DeclarativeOAuthFlow oAuthFlowImplementation = mock(DeclarativeOAuthFlow.class); + when(oAuthFlowImplementation.getSourceConsentUrl(eq(workspaceId), eq(null), eq(redirectUrl), eq(testingValues), + any(OAuthConfigSpecification.class), eq(testingValues))) + .thenReturn(consentUrl); + + when(oauthImplementationFactory.createDeclarativeOAuthImplementation(any(ConnectorSpecification.class))) + .thenReturn(oAuthFlowImplementation); + + final BuilderProjectOauthConsentRequest request = new BuilderProjectOauthConsentRequest() + .builderProjectId(projectId) + 
.workspaceId(workspaceId) + .redirectUrl(redirectUrl); + + final OAuthConsentRead response = connectorBuilderProjectsHandler.getConnectorBuilderProjectOAuthConsent(request); + + verify(oAuthFlowImplementation, times(1)).getSourceConsentUrl(eq(workspaceId), eq(null), eq(redirectUrl), eq(testingValues), + any(OAuthConfigSpecification.class), eq(testingValues)); + assertEquals(consentUrl, response.getConsentUrl()); + } + + @Test + void testCompleteConnectorBuilderProjectOAuth() throws Exception { + final UUID projectId = UUID.randomUUID(); + final UUID workspaceId = UUID.randomUUID(); + final String redirectUrl = "https://airbyte.com/auth_flow"; + final Map queryParams = Map.of("code", "12345"); + final Map oAuthResponse = Map.of("accessToken", "token"); + + final OAuthConfigSpecification oAuthConfigSpecification = mock(OAuthConfigSpecification.class); + final ConnectorSpecification spec = + new ConnectorSpecification().withAdvancedAuth(new AdvancedAuth().withOauthConfigSpecification(oAuthConfigSpecification)); + final ConnectorBuilderProject project = + new ConnectorBuilderProject().withManifestDraft(Jsons.jsonNode(Map.of("spec", spec))).withTestingValues(testingValuesWithSecretCoordinates); + when(connectorBuilderService.getConnectorBuilderProject(projectId, true)).thenReturn(project); + when(secretsRepositoryReader.hydrateConfigFromDefaultSecretPersistence(testingValuesWithSecretCoordinates)).thenReturn(testingValues); + + final DeclarativeOAuthFlow oAuthFlowMock = mock(DeclarativeOAuthFlow.class); + when(oAuthFlowMock.completeSourceOAuth(eq(workspaceId), eq(null), eq(queryParams), eq(redirectUrl), eq(testingValues), + any(OAuthConfigSpecification.class), eq(testingValues))) + .thenReturn(oAuthResponse); + + when(oauthImplementationFactory.createDeclarativeOAuthImplementation(any(ConnectorSpecification.class))) + .thenReturn(oAuthFlowMock); + + when(connectorBuilderService.getConnectorBuilderProject(eq(projectId), eq(true))).thenReturn(project); + + final CompleteConnectorBuilderProjectOauthRequest request = new CompleteConnectorBuilderProjectOauthRequest() + .builderProjectId(projectId) + .workspaceId(workspaceId) + .queryParams(queryParams) + .redirectUrl(redirectUrl); + + final CompleteOAuthResponse response = connectorBuilderProjectsHandler.completeConnectorBuilderProjectOAuth(request); + final CompleteOAuthResponse expectedResponse = new CompleteOAuthResponse().requestSucceeded(true).authPayload(oAuthResponse); + verify(oAuthFlowMock, times(1)).completeSourceOAuth(eq(workspaceId), eq(null), eq(queryParams), eq(redirectUrl), eq(testingValues), + any(OAuthConfigSpecification.class), eq(testingValues)); + assertEquals(expectedResponse, response); + } + } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConnectorDefinitionSpecificationHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorDefinitionSpecificationHandlerTest.java similarity index 79% rename from airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConnectorDefinitionSpecificationHandlerTest.java rename to airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorDefinitionSpecificationHandlerTest.java index f3ada2553bd..02fb00ed724 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/ConnectorDefinitionSpecificationHandlerTest.java +++ 
b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectorDefinitionSpecificationHandlerTest.java @@ -2,7 +2,7 @@ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. */ -package io.airbyte.commons.server.handlers.helpers; +package io.airbyte.commons.server.handlers; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; @@ -19,22 +19,28 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.Exceptions; import io.airbyte.commons.server.converters.JobConverter; -import io.airbyte.commons.server.handlers.ConnectorDefinitionSpecificationHandler; import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.DestinationConnection; +import io.airbyte.config.DestinationOAuthParameter; import io.airbyte.config.SourceConnection; +import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.data.exceptions.ConfigNotFoundException; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.OAuthService; import io.airbyte.data.services.SourceService; +import io.airbyte.protocol.models.AdvancedAuth; +import io.airbyte.protocol.models.AdvancedAuth.AuthFlowType; import io.airbyte.protocol.models.ConnectorSpecification; +import io.airbyte.protocol.models.OAuthConfigSpecification; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.net.URI; import java.util.HashMap; import java.util.List; +import java.util.Optional; import java.util.UUID; import org.assertj.core.api.CollectionAssert; import org.junit.jupiter.api.BeforeEach; @@ -68,6 +74,7 @@ class ConnectorDefinitionSpecificationHandlerTest { private SourceService sourceService; private DestinationService destinationService; + private OAuthService oAuthService; @BeforeEach void setup() { @@ -75,9 +82,10 @@ void setup() { jobConverter = mock(JobConverter.class); sourceService = mock(SourceService.class); destinationService = mock(DestinationService.class); + oAuthService = mock(OAuthService.class); connectorDefinitionSpecificationHandler = - new ConnectorDefinitionSpecificationHandler(actorDefinitionVersionHelper, jobConverter, sourceService, destinationService); + new ConnectorDefinitionSpecificationHandler(actorDefinitionVersionHelper, jobConverter, sourceService, destinationService, oAuthService); } @Test @@ -222,7 +230,7 @@ void testGetSourceSpec() @ValueSource(booleans = {true, false}) @ParameterizedTest - void testDestinationSyncModeEnrichment(boolean supportsRefreshes) + void testDestinationSyncModeEnrichment(final boolean supportsRefreshes) throws JsonValidationException, IOException, ConfigNotFoundException { final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId = new DestinationDefinitionIdWithWorkspaceId().destinationDefinitionId(UUID.randomUUID()).workspaceId(UUID.randomUUID()); @@ -261,7 +269,7 @@ void testDestinationSyncModeEnrichment(boolean supportsRefreshes) @ValueSource(booleans = {true, false}) @ParameterizedTest - void testDestinationSyncModeEnrichmentWithoutOverwrite(boolean supportsRefreshes) + void testDestinationSyncModeEnrichmentWithoutOverwrite(final boolean supportsRefreshes) throws JsonValidationException, IOException, ConfigNotFoundException { final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId = new 
DestinationDefinitionIdWithWorkspaceId().destinationDefinitionId(UUID.randomUUID()).workspaceId(UUID.randomUUID()); @@ -293,4 +301,58 @@ void testDestinationSyncModeEnrichmentWithoutOverwrite(boolean supportsRefreshes DestinationSyncMode.APPEND, DestinationSyncMode.APPEND_DEDUP)); } + @ValueSource(booleans = {true, false}) + @ParameterizedTest + void getDestinationSpecificationReadAdvancedAuth(final boolean advancedAuthCredentialsAvailable) throws IOException { + final UUID workspaceId = UUID.randomUUID(); + final UUID destinationDefinitionId = UUID.randomUUID(); + when(oAuthService.getDestinationOAuthParameterOptional(workspaceId, destinationDefinitionId)) + .thenReturn(advancedAuthCredentialsAvailable ? Optional.of(new DestinationOAuthParameter()) : Optional.empty()); + + final DestinationDefinitionIdWithWorkspaceId destinationDefinitionIdWithWorkspaceId = + new DestinationDefinitionIdWithWorkspaceId().destinationDefinitionId(destinationDefinitionId).workspaceId(workspaceId); + final StandardDestinationDefinition destinationDefinition = new StandardDestinationDefinition() + .withName(NAME) + .withDestinationDefinitionId(destinationDefinitionIdWithWorkspaceId.getDestinationDefinitionId()); + + final ConnectorSpecification connectorSpecification = new ConnectorSpecification() + .withDocumentationUrl(Exceptions.toRuntime(() -> new URI(CONNECTOR_URL))) + .withChangelogUrl(Exceptions.toRuntime(() -> new URI(CONNECTOR_URL))) + .withConnectionSpecification(Jsons.jsonNode(new HashMap<>())) + .withAdvancedAuth(new AdvancedAuth().withAuthFlowType(AuthFlowType.OAUTH_2_0).withOauthConfigSpecification(new OAuthConfigSpecification())); + + final DestinationDefinitionSpecificationRead response = + connectorDefinitionSpecificationHandler.getDestinationSpecificationRead(destinationDefinition, connectorSpecification, true, workspaceId); + + verify(oAuthService).getDestinationOAuthParameterOptional(workspaceId, destinationDefinitionId); + assertEquals(advancedAuthCredentialsAvailable, response.getAdvancedAuthCredentialsAvailable()); + } + + @ValueSource(booleans = {true, false}) + @ParameterizedTest + void getSourceSpecificationReadAdvancedAuth(final boolean advancedAuthCredentialsAvailable) throws IOException { + final UUID workspaceId = UUID.randomUUID(); + final UUID sourceDefinitionId = UUID.randomUUID(); + when(oAuthService.getSourceOAuthParameterOptional(workspaceId, sourceDefinitionId)) + .thenReturn(advancedAuthCredentialsAvailable ? 
Optional.of(new SourceOAuthParameter()) : Optional.empty()); + + final SourceDefinitionIdWithWorkspaceId sourceDefinitionIdWithWorkspaceId = + new SourceDefinitionIdWithWorkspaceId().sourceDefinitionId(sourceDefinitionId).workspaceId(workspaceId); + final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() + .withName(NAME) + .withSourceDefinitionId(sourceDefinitionIdWithWorkspaceId.getSourceDefinitionId()); + + final ConnectorSpecification connectorSpecification = new ConnectorSpecification() + .withDocumentationUrl(Exceptions.toRuntime(() -> new URI(CONNECTOR_URL))) + .withChangelogUrl(Exceptions.toRuntime(() -> new URI(CONNECTOR_URL))) + .withConnectionSpecification(Jsons.jsonNode(new HashMap<>())) + .withAdvancedAuth(new AdvancedAuth().withAuthFlowType(AuthFlowType.OAUTH_2_0).withOauthConfigSpecification(new OAuthConfigSpecification())); + + final SourceDefinitionSpecificationRead response = + connectorDefinitionSpecificationHandler.getSourceSpecificationRead(sourceDefinition, connectorSpecification, workspaceId); + + verify(oAuthService).getSourceOAuthParameterOptional(workspaceId, sourceDefinitionId); + assertEquals(advancedAuthCredentialsAvailable, response.getAdvancedAuthCredentialsAvailable()); + } + } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DiagnosticToolHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DiagnosticToolHandlerTest.java deleted file mode 100644 index 4deed5cea3d..00000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/DiagnosticToolHandlerTest.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers; - -import static io.airbyte.commons.server.handlers.DiagnosticToolHandlerKt.AIRBYTE_DEPLOYMENT_YAML; -import static io.airbyte.commons.server.handlers.DiagnosticToolHandlerKt.AIRBYTE_INSTANCE_YAML; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.LicenseInfoResponse; -import io.airbyte.api.model.generated.LicenseStatus; -import io.airbyte.config.ActorDefinitionVersion; -import io.airbyte.config.DestinationConnection; -import io.airbyte.config.SourceConnection; -import io.airbyte.config.StandardSync; -import io.airbyte.config.StandardWorkspace; -import io.airbyte.config.persistence.ActorDefinitionVersionHelper; -import io.airbyte.data.services.ConnectionService; -import io.airbyte.data.services.DestinationService; -import io.airbyte.data.services.SourceService; -import io.airbyte.data.services.WorkspaceService; -import io.airbyte.validation.json.JsonValidationException; -import io.fabric8.kubernetes.api.model.Container; -import io.fabric8.kubernetes.api.model.Node; -import io.fabric8.kubernetes.api.model.NodeCondition; -import io.fabric8.kubernetes.api.model.NodeList; -import io.fabric8.kubernetes.api.model.NodeStatus; -import io.fabric8.kubernetes.api.model.ObjectMeta; -import io.fabric8.kubernetes.api.model.Pod; -import io.fabric8.kubernetes.api.model.PodList; -import io.fabric8.kubernetes.api.model.PodSpec; -import io.fabric8.kubernetes.api.model.PodStatus; -import io.fabric8.kubernetes.api.model.Quantity; -import io.fabric8.kubernetes.api.model.ResourceRequirements; -import io.fabric8.kubernetes.client.KubernetesClient; -import io.fabric8.kubernetes.client.dsl.MixedOperation; -import 
io.fabric8.kubernetes.client.dsl.NonNamespaceOperation; -import io.fabric8.kubernetes.client.dsl.PodResource; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.time.OffsetDateTime; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.UUID; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; -import org.jetbrains.annotations.NotNull; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; - -/** - * DiagnosticToolHandlerTest. - */ -@SuppressWarnings("PMD") -@ExtendWith(MockitoExtension.class) -class DiagnosticToolHandlerTest { - - private DiagnosticToolHandler diagnosticToolHandler; - private WorkspaceService workspaceService; - private ConnectionService connectionService; - private SourceService sourceService; - private DestinationService destinationService; - private InstanceConfigurationHandler instanceConfigurationHandler; - private ActorDefinitionVersionHelper actorDefinitionVersionHelper; - private KubernetesClient kubernetesClient; - - @BeforeEach - void beforeEach() throws JsonValidationException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { - workspaceService = mock(WorkspaceService.class); - connectionService = mock(ConnectionService.class); - sourceService = mock(SourceService.class); - destinationService = mock(DestinationService.class); - actorDefinitionVersionHelper = mock(ActorDefinitionVersionHelper.class); - instanceConfigurationHandler = mock(InstanceConfigurationHandler.class); - kubernetesClient = mock(KubernetesClient.class); - diagnosticToolHandler = new DiagnosticToolHandler( - workspaceService, - connectionService, - sourceService, - destinationService, - actorDefinitionVersionHelper, - instanceConfigurationHandler, - kubernetesClient); - - // Mock workspace API responses - final var workspace = getStandardWorkspace(); - when(workspaceService.listStandardWorkspaces(false)).thenReturn(List.of(workspace)); - when(connectionService.listWorkspaceStandardSyncs(workspace.getWorkspaceId(), false)).thenReturn(List.of(getStandardSync())); - when(sourceService.listWorkspaceSourceConnection(any())).thenReturn(List.of(getSource())); - when(sourceService.isSourceActive(any())).thenReturn(true); - when(destinationService.listWorkspaceDestinationConnection(any())).thenReturn(List.of(getDestination())); - when(destinationService.isDestinationActive(any())).thenReturn(true); - when(actorDefinitionVersionHelper.getSourceVersionWithOverrideStatus(any(), any(), any())).thenReturn(getActorDefinitionVersion()); - when(actorDefinitionVersionHelper.getDestinationVersionWithOverrideStatus(any(), any(), any())).thenReturn(getActorDefinitionVersion()); - - // Mock license API responses - when(instanceConfigurationHandler.licenseInfo()).thenReturn(new LicenseInfoResponse() - .edition("pro") - .licenseStatus(LicenseStatus.PRO) - .expirationDate(OffsetDateTime.now().plusDays(10).toEpochSecond()) - .usedNodes(2)); - // Mock k8s responses - final Node node1 = new Node(); - final ObjectMeta metadata = new ObjectMeta(); - metadata.setName("node1"); - node1.setMetadata(metadata); - final NodeStatus status = new NodeStatus(); - final NodeCondition condition = new NodeCondition(); - condition.setType("Ready"); - condition.setStatus("true"); - final List conditions = List.of(condition); - 
status.setConditions(conditions); - final Map allocatable = new HashMap<>(); - allocatable.put("cpu", new Quantity("500m")); - allocatable.put("memory", new Quantity("1Gi")); - status.setAllocatable(allocatable); - node1.setStatus(status); - final NodeList nodeList = new NodeList(); - nodeList.setItems(List.of(node1)); - final NonNamespaceOperation op = mock(NonNamespaceOperation.class); - when(kubernetesClient.nodes()).thenReturn(op); - when(op.list()).thenReturn(nodeList); - - final Pod pod1 = new Pod(); - final ObjectMeta podMetadata = new ObjectMeta(); - podMetadata.setName("pod1"); - pod1.setMetadata(podMetadata); - final PodStatus podStatus = new PodStatus(); - podStatus.setPhase("Running"); - pod1.setStatus(podStatus); - - final PodSpec podSpec = new PodSpec(); - final Container container = new Container(); - container.setName("containerName"); - podSpec.setContainers(List.of(container)); - final ResourceRequirements requirements = new ResourceRequirements(); - final Map limits = new HashMap<>(); - limits.put("cpu", new Quantity("500m")); - limits.put("memory", new Quantity("1Gi")); - requirements.setLimits(limits); - container.setResources(requirements); - podSpec.setContainers(List.of(container)); - pod1.setSpec(podSpec); - - final PodList podList = new PodList(); - podList.setItems(List.of(pod1)); - - final MixedOperation mop = mock(MixedOperation.class); - final NonNamespaceOperation podNamespaceOperation = mock(NonNamespaceOperation.class); - when(kubernetesClient.pods()).thenReturn(mop); - when(mop.inNamespace("ab")).thenReturn(podNamespaceOperation); - when(podNamespaceOperation.list()).thenReturn(podList); - - } - - @Test - void testGenerateDiagnosticReport() throws IOException { - final File zipFile = diagnosticToolHandler.generateDiagnosticReport(); - Assertions.assertTrue(zipFile.exists()); - // Check the content of the zip file - try (final FileInputStream fis = new FileInputStream(zipFile); - final ZipInputStream zis = new ZipInputStream(fis)) { - - ZipEntry entry; - boolean foundInstanceYaml = false; - boolean foundDeploymentYaml = false; - - // Iterate through the entries in the zip - while ((entry = zis.getNextEntry()) != null) { - if (entry.getName().equals(AIRBYTE_INSTANCE_YAML)) { - foundInstanceYaml = true; - - // Check the content of airbyte_instance.yaml - final byte[] buffer = new byte[1024]; - int bytesRead; - final StringBuilder content = new StringBuilder(); - while ((bytesRead = zis.read(buffer)) != -1) { - content.append(new String(buffer, 0, bytesRead)); - } - // workspace information - Assertions.assertTrue(content.toString().contains("workspaces")); - Assertions.assertTrue(content.toString().contains("connections")); - Assertions.assertTrue(content.toString().contains("connectors")); - // license information - Assertions.assertTrue(content.toString().contains("license")); - Assertions.assertTrue(content.toString().contains("expiryDate")); - Assertions.assertTrue(content.toString().contains("usedNodes")); - } else if (entry.getName().equals(AIRBYTE_DEPLOYMENT_YAML)) { - foundDeploymentYaml = true; - - // Check the content of airbyte_deployment.yaml - final byte[] buffer = new byte[1024]; - int bytesRead; - final StringBuilder content = new StringBuilder(); - while ((bytesRead = zis.read(buffer)) != -1) { - content.append(new String(buffer, 0, bytesRead)); - } - // k8s information - Assertions.assertTrue(content.toString().contains("k8s")); - Assertions.assertTrue(content.toString().contains("nodes")); - 
Assertions.assertTrue(content.toString().contains("pods")); - } - } - - // Ensure all yaml files are present in the zip - Assertions.assertTrue(foundInstanceYaml); - Assertions.assertTrue(foundDeploymentYaml); - } - } - - private static @NotNull StandardWorkspace getStandardWorkspace() { - return new StandardWorkspace() - .withName("workspace1") - .withWorkspaceId(UUID.randomUUID()); - } - - private static StandardSync getStandardSync() { - return new StandardSync() - .withName("connection1") - .withStatus(StandardSync.Status.ACTIVE) - .withConnectionId(UUID.randomUUID()) - .withSourceId(UUID.randomUUID()) - .withDestinationId(UUID.randomUUID()); - } - - private static SourceConnection getSource() { - return new SourceConnection() - .withSourceId(UUID.randomUUID()) - .withName("source") - .withSourceDefinitionId(UUID.randomUUID()); - } - - private static ActorDefinitionVersionHelper.@NotNull ActorDefinitionVersionWithOverrideStatus getActorDefinitionVersion() { - return new ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus( - new ActorDefinitionVersion().withDockerImageTag("tag").withSupportState(ActorDefinitionVersion.SupportState.SUPPORTED), true); - } - - private static DestinationConnection getDestination() { - return new DestinationConnection().withDestinationId(UUID.randomUUID()).withName("destination1").withDestinationDefinitionId(UUID.randomUUID()); - } - -} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java index cc3940ebd33..c9122bfa211 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobHistoryHandlerTest.java @@ -47,6 +47,7 @@ import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.logging.LogClientManager; +import io.airbyte.commons.logging.LogEvents; import io.airbyte.commons.logging.LogUtils; import io.airbyte.commons.server.converters.ApiPojoConverters; import io.airbyte.commons.server.converters.JobConverter; @@ -88,7 +89,6 @@ import io.airbyte.persistence.job.JobPersistence; import io.airbyte.persistence.job.JobPersistence.AttemptStats; import io.airbyte.persistence.job.JobPersistence.JobAttemptPair; -import io.airbyte.persistence.job.WorkspaceHelper; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.nio.file.Path; @@ -215,13 +215,14 @@ class JobHistoryHandlerTest { private SourceHandler sourceHandler; private DestinationHandler destinationHandler; private Attempt testJobAttempt; + private JobConverter jobConverter; private JobPersistence jobPersistence; + private LogClientManager logClientManager; private LogUtils logUtils; private FeatureFlagClient featureFlagClient; private JobHistoryHandler jobHistoryHandler; private TemporalClient temporalClient; private JobService jobService; - private WorkspaceHelper workspaceHelper; private static JobRead toJobInfo(final Job job) { return new JobRead().id(job.getId()) @@ -276,10 +277,11 @@ void setUp() { sourceHandler = mock(SourceHandler.class); destinationHandler = mock(DestinationHandler.class); jobPersistence = mock(JobPersistence.class); + logClientManager = mock(LogClientManager.class); logUtils = mock(LogUtils.class); featureFlagClient = mock(TestClient.class); temporalClient = mock(TemporalClient.class); - 
workspaceHelper = mock(WorkspaceHelper.class); + jobConverter = new JobConverter(logClientManager, logUtils); final SourceDefinitionsHandler sourceDefinitionsHandler = mock(SourceDefinitionsHandler.class); final DestinationDefinitionsHandler destinationDefinitionsHandler = mock(DestinationDefinitionsHandler.class); final AirbyteVersion airbyteVersion = mock(AirbyteVersion.class); @@ -294,11 +296,9 @@ void setUp() { airbyteVersion, temporalClient, featureFlagClient, - mock(LogClientManager.class), + jobConverter, jobService, - apiPojoConverters, - logUtils, - workspaceHelper); + apiPojoConverters); } @Nested @@ -560,6 +560,7 @@ void testGetJobInfo() throws IOException { Job job = new Job(JOB_ID, JOB_CONFIG.getConfigType(), JOB_CONFIG_ID, JOB_CONFIG, List.of(testJobAttempt), JOB_STATUS, null, CREATED_AT, CREATED_AT); when(jobPersistence.getJob(JOB_ID)).thenReturn(job); + when(logClientManager.getLogs(any())).thenReturn(new LogEvents(List.of(), "1")); final JobIdRequestBody requestBody = new JobIdRequestBody().id(JOB_ID); final JobInfoRead jobInfoActual = jobHistoryHandler.getJobInfo(requestBody); @@ -615,6 +616,7 @@ void testGetDebugJobInfo() when(destinationHandler.getDestination(destinationIdRequestBody)).thenReturn(destinationRead); when(jobPersistence.getJob(JOB_ID)).thenReturn(job); when(jobPersistence.getAttemptStats(anyLong(), anyInt())).thenReturn(FIRST_ATTEMPT_STATS); + when(logClientManager.getLogs(any())).thenReturn(new LogEvents(List.of(), "1")); final JobIdRequestBody requestBody = new JobIdRequestBody().id(JOB_ID); final JobDebugInfoRead jobDebugInfoActual = jobHistoryHandler.getJobDebugInfo(requestBody); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java index f1093185028..feb72b5e244 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/JobInputHandlerTest.java @@ -45,6 +45,7 @@ import io.airbyte.data.exceptions.ConfigNotFoundException; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SourceService; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; @@ -101,6 +102,7 @@ class JobInputHandlerTest { private SourceService sourceService; private DestinationService destinatinonService; private ConnectionService connectionService; + private ScopedConfigurationService scopedConfigurationService; private final ApiPojoConverters apiPojoConverters = new ApiPojoConverters(new CatalogConverter(new FieldGenerator(), Collections.emptyList())); @@ -121,6 +123,7 @@ void init() throws IOException, JsonValidationException, ConfigNotFoundException sourceService = mock(SourceService.class); destinatinonService = mock(DestinationService.class); connectionService = mock(ConnectionService.class); + scopedConfigurationService = mock(ScopedConfigurationService.class); jobInputHandler = new JobInputHandler(jobPersistence, featureFlagClient, @@ -133,7 +136,8 @@ void init() throws IOException, JsonValidationException, ConfigNotFoundException connectionService, sourceService, destinatinonService, - apiPojoConverters); + apiPojoConverters, + scopedConfigurationService); when(jobPersistence.getJob(JOB_ID)).thenReturn(job); 
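// Aside (not part of the diff): in the JobHistoryHandlerTest changes above, the tests now build a
// JobConverter from a LogClientManager and LogUtils and stub getLogs to return an empty LogEvents.
// A rough Kotlin/mockk equivalent of that wiring, assuming only the constructor and stub shapes
// already shown in the hunks, could look like this sketch.
import io.airbyte.commons.logging.LogClientManager
import io.airbyte.commons.logging.LogEvents
import io.airbyte.commons.logging.LogUtils
import io.airbyte.commons.server.converters.JobConverter
import io.mockk.every
import io.mockk.mockk

fun buildJobConverterForTests(): JobConverter {
  val logClientManager = mockk<LogClientManager>()
  val logUtils = mockk<LogUtils>()
  // Empty structured log events keep JobConverter happy without touching real log storage.
  every { logClientManager.getLogs(any()) } returns LogEvents(emptyList(), "1")
  return JobConverter(logClientManager, logUtils)
}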
when(configInjector.injectConfig(any(), any())).thenAnswer(i -> i.getArguments()[0]); @@ -201,8 +205,8 @@ void testGetSyncWorkflowInput() throws JsonValidationException, ConfigNotFoundEx .withSourceConfiguration(SOURCE_CONFIG_WITH_OAUTH_AND_INJECTED_CONFIG) .withDestinationConfiguration(DESTINATION_CONFIG_WITH_OAUTH) .withIsReset(false) - .withUseAsyncReplicate(false) - .withUseAsyncActivities(false); + .withUseAsyncReplicate(true) + .withUseAsyncActivities(true); final JobRunConfig expectedJobRunConfig = new JobRunConfig() .withJobId(String.valueOf(JOB_ID)) @@ -277,8 +281,8 @@ void testGetResetSyncWorkflowInput() throws IOException { .withDestinationConfiguration(DESTINATION_CONFIG_WITH_OAUTH) .withWebhookOperationConfigs(jobResetConfig.getWebhookOperationConfigs()) .withIsReset(true) - .withUseAsyncReplicate(false) - .withUseAsyncActivities(false); + .withUseAsyncReplicate(true) + .withUseAsyncActivities(true); final JobRunConfig expectedJobRunConfig = new JobRunConfig() .withJobId(String.valueOf(JOB_ID)) diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java index de70088677c..d24e640abed 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java @@ -9,6 +9,7 @@ import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -19,6 +20,7 @@ import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody; import io.airbyte.api.model.generated.SetInstancewideSourceOauthParamsRequestBody; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.server.handlers.helpers.OAuthHelper; import io.airbyte.config.DestinationOAuthParameter; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ActorDefinitionVersionHelper; @@ -31,9 +33,9 @@ import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.featureflag.TestClient; +import io.airbyte.oauth.OAuthImplementationFactory; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; -import java.net.http.HttpClient; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -48,7 +50,7 @@ class OAuthHandlerTest { private OAuthHandler handler; private TrackingClient trackingClient; - private HttpClient httpClient; + private OAuthImplementationFactory oauthImplementationFactory; private SecretsRepositoryReader secretsRepositoryReader; private SecretsRepositoryWriter secretsRepositoryWriter; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; @@ -66,7 +68,7 @@ class OAuthHandlerTest { @BeforeEach public void init() { trackingClient = mock(TrackingClient.class); - httpClient = Mockito.mock(HttpClient.class); + oauthImplementationFactory = mock(OAuthImplementationFactory.class); secretsRepositoryReader = mock(SecretsRepositoryReader.class); secretsRepositoryWriter = mock(SecretsRepositoryWriter.class); actorDefinitionVersionHelper = mock(ActorDefinitionVersionHelper.class); @@ -77,7 +79,7 @@ public void init() { secretPersistenceConfigService = 
mock(SecretPersistenceConfigService.class); workspaceService = mock(WorkspaceService.class); handler = new OAuthHandler( - httpClient, + oauthImplementationFactory, trackingClient, secretsRepositoryWriter, actorDefinitionVersionHelper, @@ -281,12 +283,13 @@ void testCompleteSourceOAuthHandleReturnSecret() .sourceDefinitionId(sourceDefinitionId) .workspaceId(workspaceId); - final OAuthHandler handlerSpy = Mockito.spy(handler); + final OAuthHandler handlerSpy = spy(handler); doReturn( - handler.mapToCompleteOAuthResponse(Map.of("access_token", "access", "refresh_token", "refresh"))).when(handlerSpy).completeSourceOAuth(any()); + OAuthHelper.mapToCompleteOAuthResponse(Map.of("access_token", "access", "refresh_token", "refresh"))).when(handlerSpy) + .completeSourceOAuth(any()); doReturn( - handler.mapToCompleteOAuthResponse(Map.of("secret_id", "secret"))).when(handlerSpy).writeOAuthResponseSecret(any(), any()); + OAuthHelper.mapToCompleteOAuthResponse(Map.of("secret_id", "secret"))).when(handlerSpy).writeOAuthResponseSecret(any(), any()); handlerSpy.completeSourceOAuthHandleReturnSecret(completeSourceOauthRequest); @@ -313,7 +316,7 @@ void testCompleteSourceOAuthHandleReturnSecret() @Test void testGetSourceOAuthParamConfigNoFeatureFlag() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { + throws JsonValidationException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID sourceDefinitionId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); final SourceOAuthParameter sourceOAuthParameter = new SourceOAuthParameter() @@ -333,7 +336,7 @@ void testGetSourceOAuthParamConfigNoFeatureFlag() @Test void testGetSourceOAuthParamConfigFeatureFlagNoOverride() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { + throws JsonValidationException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID sourceDefinitionId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); final SourceOAuthParameter sourceOAuthParameter = new SourceOAuthParameter() diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java index 37f67ca1944..2eb3aff68ee 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java @@ -67,6 +67,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.lang.Exceptions; import io.airbyte.commons.logging.LogClientManager; +import io.airbyte.commons.logging.LogEvents; import io.airbyte.commons.logging.LogUtils; import io.airbyte.commons.server.converters.ConfigurationUpdate; import io.airbyte.commons.server.converters.JobConverter; @@ -129,7 +130,6 @@ import io.airbyte.persistence.job.JobNotifier; import io.airbyte.persistence.job.JobPersistence; import io.airbyte.persistence.job.WebUrlHelper; -import io.airbyte.persistence.job.WorkspaceHelper; import io.airbyte.persistence.job.factory.OAuthConfigSupplier; import io.airbyte.persistence.job.factory.SyncJobFactory; import io.airbyte.persistence.job.tracker.JobTracker; @@ -148,12 +148,12 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Optional; import 
java.util.UUID; import java.util.stream.Stream; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; @@ -284,11 +284,9 @@ class SchedulerHandlerTest { private OperationService operationService; private final CatalogConverter catalogConverter = new CatalogConverter(new FieldGenerator(), Collections.emptyList()); private final ApplySchemaChangeHelper applySchemaChangeHelper = new ApplySchemaChangeHelper(catalogConverter); - private WorkspaceHelper workspaceHelper; @BeforeEach void setup() throws JsonValidationException, ConfigNotFoundException, IOException { - featureFlagClient = new TestClient(Map.of()); job = mock(Job.class, RETURNS_DEEP_STUBS); jobResponse = mock(SynchronousResponse.class, RETURNS_DEEP_STUBS); final SynchronousJobMetadata synchronousJobMetadata = mock(SynchronousJobMetadata.class); @@ -329,9 +327,9 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio connectorDefinitionSpecificationHandler = mock(ConnectorDefinitionSpecificationHandler.class); logClientManager = mock(LogClientManager.class); logUtils = mock(LogUtils.class); - workspaceHelper = mock(WorkspaceHelper.class); - jobConverter = spy(new JobConverter(featureFlagClient, logClientManager, logUtils, workspaceHelper)); + when(logClientManager.getLogs(any())).thenReturn(new LogEvents(List.of(), "1")); + jobConverter = spy(new JobConverter(logClientManager, logUtils)); featureFlagClient = mock(TestClient.class); workspaceService = mock(WorkspaceService.class); @@ -977,6 +975,7 @@ void testDiscoverSchemaForSourceFromSourceIdFailed(final boolean enabled) // TODO: to be removed once we swap to new discover flow @ParameterizedTest @ValueSource(booleans = {true, false}) + @Disabled("Delete along with the deletion of the pre-async discover code") void whenDiscoverPostprocessInTemporalEnabledDiffAndDisablingIsNotPerformed(final boolean enabled) throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.config.persistence.ConfigNotFoundException { when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); @@ -1052,6 +1051,7 @@ void whenDiscoverPostprocessInTemporalEnabledDiffAndDisablingIsNotPerformed(fina // TODO: to be removed once we swap to new discover flow @Test + @Disabled("Delete along with the deletion of the pre-async discover code") void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreaking() throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.config.persistence.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); @@ -1113,6 +1113,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreaking() // TODO: to be removed once we swap to new discover flow @Test + @Disabled("Delete along with the deletion of the pre-async discover code") void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceFeatureFlag() throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.config.persistence.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); @@ -1177,6 +1178,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP // TODO: to be removed once we swap to new discover flow @Test + @Disabled("Delete along with 
the deletion of the pre-async discover code") void testDiscoverSchemaFromSourceIdWithConnectionIdBreakingFeatureFlagOn() throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException, io.airbyte.config.persistence.ConfigNotFoundException { @@ -1243,6 +1245,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdBreakingFeatureFlagOn() // TODO: to be removed once we swap to new discover flow @Test + @Disabled("Delete along with the deletion of the pre-async discover code") void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceFeatureFlagNoDiff() throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.config.persistence.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); @@ -1304,6 +1307,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP // TODO: to be removed once we swap to new discover flow @Test + @Disabled("Delete along with the deletion of the pre-async discover code") void testDiscoverSchemaForSourceMultipleConnectionsFeatureFlagOn() throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.config.persistence.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); @@ -1393,6 +1397,7 @@ void testDiscoverSchemaForSourceMultipleConnectionsFeatureFlagOn() } @Test + @Disabled("Delete along with the deletion of the pre-async discover code") void testDiscoverSchemaFromSourceIdWithConnectionUpdateNonSuccessResponse() throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.config.persistence.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java index ca15ceab41a..094dfebea31 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java @@ -117,6 +117,8 @@ import io.airbyte.data.services.shared.StandardSyncQuery; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.TestClient; +import io.airbyte.mappers.transformations.DestinationCatalogGenerator; +import io.airbyte.mappers.transformations.DestinationCatalogGenerator.CatalogGenerationResult; import io.airbyte.persistence.job.factory.OAuthConfigSupplier; import io.airbyte.protocol.models.CatalogHelpers; import io.airbyte.protocol.models.ConnectorSpecification; @@ -130,6 +132,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.UUID; @@ -168,6 +171,7 @@ class WebBackendConnectionsHandlerTest { private WorkspaceService workspaceService; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; private ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; + private DestinationCatalogGenerator destinationCatalogGenerator; private final FeatureFlagClient featureFlagClient = mock(TestClient.class); private final FieldGenerator fieldGenerator = new FieldGenerator(); private final CatalogConverter catalogConverter = new CatalogConverter(new FieldGenerator(), 
Collections.emptyList()); @@ -197,6 +201,7 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio eventRunner = mock(EventRunner.class); actorDefinitionVersionHelper = mock(ActorDefinitionVersionHelper.class); actorDefinitionHandlerHelper = mock(ActorDefinitionHandlerHelper.class); + destinationCatalogGenerator = mock(DestinationCatalogGenerator.class); final JsonSchemaValidator validator = mock(JsonSchemaValidator.class); final JsonSecretsProcessor secretsProcessor = mock(JsonSecretsProcessor.class); @@ -261,7 +266,8 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio sourceService, workspaceService, catalogConverter, applySchemaChangeHelper, - apiPojoConverters)); + apiPojoConverters, + destinationCatalogGenerator)); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(UUID.randomUUID()) @@ -309,6 +315,9 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio when(actorDefinitionVersionHelper.getSourceVersion(any(), any(), any())).thenReturn(mockADV); when(actorDefinitionVersionHelper.getDestinationVersion(any(), any(), any())).thenReturn(mockADV); + when(destinationCatalogGenerator.generateDestinationCatalog(any())) + .thenAnswer(invocation -> new CatalogGenerationResult(invocation.getArgument(0), Map.of())); + connectionRead = ConnectionHelpers.generateExpectedConnectionRead(standardSync); brokenConnectionRead = ConnectionHelpers.generateExpectedConnectionRead(brokenStandardSync); operationReadList = new OperationReadList() @@ -1038,6 +1047,9 @@ void testUpdateConnectionWithUpdatedSchemaPerStream(final Boolean useRefresh) assertEquals(expectedWithNewSchema.getSyncCatalog(), result.getSyncCatalog()); + verify(destinationCatalogGenerator).generateDestinationCatalog(catalogConverter.toConfiguredInternal(expectedWithNewSchema.getSyncCatalog())); + verify(destinationCatalogGenerator).generateDestinationCatalog(ConnectionHelpers.generateBasicConfiguredAirbyteCatalog()); + final ConnectionIdRequestBody connectionId = new ConnectionIdRequestBody().connectionId(result.getConnectionId()); verify(schedulerHandler, times(0)).resetConnection(connectionId); verify(schedulerHandler, times(0)).syncConnection(connectionId); diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java index 1cd436c0460..56bf709b14a 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WorkspacesHandlerTest.java @@ -173,8 +173,6 @@ private Organization generateOrganization(final String ssoRealm) { .withOrganizationId(ORGANIZATION_ID) .withName(TEST_ORGANIZATION_NAME) .withEmail(TEST_EMAIL) - .withPba(false) - .withOrgLevelBilling(false) .withSsoRealm(ssoRealm); } @@ -598,7 +596,6 @@ void testGetWorkspaceOrganizationInfo(final Boolean isSso) throws IOException, C assertEquals(organization.getOrganizationId(), orgInfo.getOrganizationId()); assertEquals(organization.getName(), orgInfo.getOrganizationName()); - assertEquals(organization.getPba(), orgInfo.getPba()); assertEquals(isSso, orgInfo.getSso()); // sso is true if ssoRealm is set } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/OAuthHelperTest.java 
b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/OAuthHelperTest.java new file mode 100644 index 00000000000..0c9afa3e970 --- /dev/null +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/OAuthHelperTest.java @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.server.handlers.helpers; + +import com.fasterxml.jackson.databind.JsonNode; +import io.airbyte.commons.json.Jsons; +import io.airbyte.protocol.models.OAuthConfigSpecification; +import java.util.List; +import java.util.Map; +import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.ValueSource; + +class OAuthHelperTest { + + @Test + void testExtract() { + final JsonNode input = Jsons.deserialize(""" + { + "type": "object", + "additionalProperties": false, + "properties": { + "tenant_id": { + "type": "string", + "path_in_connector_config": ["tenant_id"] + }, + "another_property": { + "type": "string", + "path_in_connector_config": ["another", "property"] + } + } + } + """); + + final Map> expected = Map.ofEntries( + Map.entry("tenant_id", List.of("tenant_id")), + Map.entry("another_property", List.of("another", "property"))); + + Assertions.assertThat(OAuthHelper.extractOauthConfigurationPaths(input)) + .containsExactlyInAnyOrderEntriesOf(expected); + } + + @Test + void testUpdateOauthConfigToAcceptAdditionalUserInputProperties() { + final OAuthConfigSpecification input = Jsons.object( + Jsons.deserialize(""" + { + "complete_oauth_output_specification": {}, + "complete_oauth_server_input_specification": {}, + "complete_oauth_server_output_specification": {}, + "oauth_connector_input_specification": {}, + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": false, + "properties": { + "subdomain": { + "type": "string", + "path_in_connector_config": ["credentials", "subdomain"] + } + } + } + } + """), + OAuthConfigSpecification.class); + + final OAuthConfigSpecification expected = Jsons.object( + Jsons.deserialize(""" + { + "complete_oauth_output_specification": {}, + "complete_oauth_server_input_specification": {}, + "complete_oauth_server_output_specification": {}, + "oauth_connector_input_specification": {}, + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": true, + "properties": { + "subdomain": { + "type": "string", + "path_in_connector_config": ["credentials", "subdomain"] + } + } + } + } + """), + OAuthConfigSpecification.class); + OAuthHelper.updateOauthConfigToAcceptAdditionalUserInputProperties(input); + Assertions.assertThat(input).isEqualTo(expected); + } + + @ParameterizedTest + @ValueSource(strings = { + "{\"oauth_user_input_from_connector_config_specification\": {}}", + "{\"oauth_user_input_from_connector_config_specification\": null}", + "{}" + }) + void testUpdateOauthConfigToAcceptAdditionalUserInputPropertiesHandlesEdgeCases(final String jsonStringConfig) { + final OAuthConfigSpecification input = Jsons.object( + Jsons.deserialize(jsonStringConfig), + OAuthConfigSpecification.class); + + final OAuthConfigSpecification expected = Jsons.object( + Jsons.deserialize(""" + { + "oauth_user_input_from_connector_config_specification": { + "type": "object", + "additionalProperties": true + } + } + """), + OAuthConfigSpecification.class); + + 
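// Aside (not part of the diff): testExtract above pins down the observable behaviour of
// OAuthHelper.extractOauthConfigurationPaths: each property in the spec maps to its
// "path_in_connector_config" list. The following self-contained Kotlin sketch reproduces that
// traversal with plain Jackson for illustration only; the real helper may be implemented differently.
import com.fasterxml.jackson.databind.JsonNode

fun extractPaths(spec: JsonNode): Map<String, List<String>> {
  val result = mutableMapOf<String, List<String>>()
  val properties = spec.get("properties") ?: return result
  properties.fields().forEach { (name, property) ->
    // Skip properties that do not declare a connector-config path.
    val path = property.get("path_in_connector_config") ?: return@forEach
    result[name] = path.map { it.asText() }
  }
  return result
}
// Fed the JSON from testExtract, this yields {tenant_id=[tenant_id], another_property=[another, property]},
// which is exactly the expected map asserted in the test.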
OAuthHelper.updateOauthConfigToAcceptAdditionalUserInputProperties(input); + + Assertions.assertThat(input).isEqualTo(expected); + } + +} diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractorTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractorTest.java deleted file mode 100644 index 8794b18bc72..00000000000 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/helpers/OAuthPathExtractorTest.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.server.handlers.helpers; - -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.commons.json.Jsons; -import java.util.List; -import java.util.Map; -import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.Test; - -class OAuthPathExtractorTest { - - @Test - void testExtract() { - final JsonNode input = Jsons.deserialize(""" - { - "type": "object", - "additionalProperties": false, - "properties": { - "tenant_id": { - "type": "string", - "path_in_connector_config": ["tenant_id"] - }, - "another_property": { - "type": "string", - "path_in_connector_config": ["another", "property"] - } - } - } - """); - - final Map> expected = Map.ofEntries( - Map.entry("tenant_id", List.of("tenant_id")), - Map.entry("another_property", List.of("another", "property"))); - - Assertions.assertThat(OAuthPathExtractor.extractOauthConfigurationPaths(input)) - .containsExactlyInAnyOrderEntriesOf(expected); - } - -} diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelperTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelperTest.kt index 7160802e63b..5f791977978 100644 --- a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelperTest.kt +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/authorization/ApiAuthorizationHelperTest.kt @@ -48,7 +48,7 @@ class ApiAuthorizationHelperTest { val scope = Scope.WORKSPACE val permissionTypes = setOf(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR) assertDoesNotThrow { - apiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) + apiAuthorizationHelper.checkWorkspacesPermissions(ids, scope, userId, permissionTypes) } } @@ -62,12 +62,12 @@ class ApiAuthorizationHelperTest { if (scope == Scope.WORKSPACES) { // Allow empty ids for WORKSPACES scope specifically assertDoesNotThrow { - apiAuthorizationHelper.checkWorkspacePermissions(emptyList(), scope, userId, permissionTypes) + apiAuthorizationHelper.checkWorkspacesPermissions(emptyList(), scope, userId, permissionTypes) } } else { // Disallow empty ids for other scopes assertThrows { - apiAuthorizationHelper.checkWorkspacePermissions(emptyList(), scope, userId, permissionTypes) + apiAuthorizationHelper.checkWorkspacesPermissions(emptyList(), scope, userId, permissionTypes) } } } @@ -84,7 +84,7 @@ class ApiAuthorizationHelperTest { every { authenticationHeaderResolver.resolveWorkspace(any()) } returns null assertThrows { - apiAuthorizationHelper.checkWorkspacePermissions(ids, Scope.WORKSPACE, userId, permissionTypes) + apiAuthorizationHelper.checkWorkspacesPermissions(ids, Scope.WORKSPACE, userId, permissionTypes) } } @@ -104,7 +104,7 @@ class ApiAuthorizationHelperTest { ) assertDoesNotThrow { - 
apiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) + apiAuthorizationHelper.checkWorkspacesPermissions(ids, scope, userId, permissionTypes) } // if no permission types pass, we fail the overall check @@ -115,7 +115,7 @@ class ApiAuthorizationHelperTest { ) assertThrows { - apiAuthorizationHelper.checkWorkspacePermissions(ids, scope, userId, permissionTypes) + apiAuthorizationHelper.checkWorkspacesPermissions(ids, scope, userId, permissionTypes) } } diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt index 0d0f9a04001..6970af5c021 100644 --- a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt @@ -26,8 +26,10 @@ import io.airbyte.data.helpers.ActorDefinitionVersionUpdater import io.airbyte.data.services.ActorDefinitionService import io.airbyte.data.services.ConnectorRolloutService import io.airbyte.data.services.ScopedConfigurationService +import io.mockk.Runs import io.mockk.clearAllMocks import io.mockk.every +import io.mockk.just import io.mockk.mockk import io.mockk.verify import io.mockk.verifyAll @@ -111,6 +113,14 @@ internal class ConnectorRolloutHandlerTest { @JvmStatic fun workflowStartedInProgress() = listOf(ConnectorEnumRolloutState.WORKFLOW_STARTED, ConnectorEnumRolloutState.IN_PROGRESS, ConnectorEnumRolloutState.PAUSED) + + @JvmStatic + fun provideConnectorRolloutStateTerminalNonCanceled(): List { + return listOf( + ConnectorRolloutStateTerminal.SUCCEEDED, + ConnectorRolloutStateTerminal.FAILED_ROLLED_BACK, + ) + } } @BeforeEach @@ -125,14 +135,18 @@ internal class ConnectorRolloutHandlerTest { every { connectorRolloutService.getConnectorRollout(rolloutId) } returns expectedRollout every { actorDefinitionService.getActorDefinitionVersion(any()) } returns createMockActorDefinitionVersion() + every { rolloutActorFinder.getActorSelectionInfo(any(), any()) } returns ActorSelectionInfo(listOf(), 0, 0, 0, 0, 0) + every { rolloutActorFinder.getSyncInfoForPinnedActors(any()) } returns emptyMap() val rolloutRead = connectorRolloutHandler.getConnectorRollout(rolloutId) - assertEquals(connectorRolloutHandler.buildConnectorRolloutRead(expectedRollout), rolloutRead) + assertEquals(connectorRolloutHandler.buildConnectorRolloutRead(expectedRollout, true), rolloutRead) verify { connectorRolloutService.getConnectorRollout(rolloutId) actorDefinitionService.getActorDefinitionVersion(any()) + rolloutActorFinder.getActorSelectionInfo(any(), any()) + rolloutActorFinder.getSyncInfoForPinnedActors(any()) } } @@ -214,6 +228,142 @@ internal class ConnectorRolloutHandlerTest { rolloutStrategy = rolloutStrategy, errorMsg = errorMsg, failedReason = failedReason, + retainPinsOnCancellation = true, + ) + every { connectorRolloutService.getConnectorRollout(any()) } returns initialRollout + every { actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion(any(), any()) } returns Unit + + val finalizedRollout = connectorRolloutHandler.getAndValidateFinalizeRequest(connectorRolloutFinalizeRequestBody) + + verify { + actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion(any(), any()) + } + + assertEquals(state.toString(), finalizedRollout.state.toString()) + assertEquals(rolloutStrategy.toString(), 
finalizedRollout.rolloutStrategy.toString()) + assertEquals(errorMsg, finalizedRollout.errorMsg) + assertEquals(failedReason, finalizedRollout.failedReason) + assertTrue( + finalizedRollout.updatedAt >= initialRollout.updatedAt, + "updatedAt from finalizedRollout should be more recent than updatedAt from initialRollout", + ) + } + + @Test + fun `test finalizeConnectorRollout with retainPinsOnCancellation and canceled`() { + val rolloutId = UUID.randomUUID() + val state = ConnectorRolloutStateTerminal.CANCELED + val rolloutStrategy = ConnectorRolloutStrategy.MANUAL + val errorMsg = "error" + val failedReason = "failure" + val actorDefinitionId = UUID.randomUUID() + val releaseCandidateVersionId = UUID.randomUUID() + + val initialRollout = + createMockConnectorRollout( + id = rolloutId, + actorDefinitionId = actorDefinitionId, + releaseCandidateVersionId = releaseCandidateVersionId, + ) + + val connectorRolloutFinalizeRequestBody = + createMockConnectorRolloutFinalizeRequestBody( + rolloutId = rolloutId, + state = state, + rolloutStrategy = rolloutStrategy, + errorMsg = errorMsg, + failedReason = failedReason, + retainPinsOnCancellation = true, + ) + every { connectorRolloutService.getConnectorRollout(any()) } returns initialRollout + every { actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion(any(), any()) } returns Unit + + val finalizedRollout = connectorRolloutHandler.getAndValidateFinalizeRequest(connectorRolloutFinalizeRequestBody) + + verify(exactly = 0) { + actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion(any(), any()) + } + + assertEquals(state.toString(), finalizedRollout.state.toString()) + assertEquals(rolloutStrategy.toString(), finalizedRollout.rolloutStrategy.toString()) + assertEquals(errorMsg, finalizedRollout.errorMsg) + assertEquals(failedReason, finalizedRollout.failedReason) + assertTrue( + finalizedRollout.updatedAt >= initialRollout.updatedAt, + "updatedAt from finalizedRollout should be more recent than updatedAt from initialRollout", + ) + } + + @Test + fun `test finalizeConnectorRollout without retainPinsOnCancellation and canceled`() { + val rolloutId = UUID.randomUUID() + val state = ConnectorRolloutStateTerminal.CANCELED + val rolloutStrategy = ConnectorRolloutStrategy.MANUAL + val errorMsg = "error" + val failedReason = "failure" + val actorDefinitionId = UUID.randomUUID() + val releaseCandidateVersionId = UUID.randomUUID() + + val initialRollout = + createMockConnectorRollout( + id = rolloutId, + actorDefinitionId = actorDefinitionId, + releaseCandidateVersionId = releaseCandidateVersionId, + ) + + val connectorRolloutFinalizeRequestBody = + createMockConnectorRolloutFinalizeRequestBody( + rolloutId = rolloutId, + state = state, + rolloutStrategy = rolloutStrategy, + errorMsg = errorMsg, + failedReason = failedReason, + retainPinsOnCancellation = false, + ) + every { connectorRolloutService.getConnectorRollout(any()) } returns initialRollout + every { actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion(any(), any()) } returns Unit + + val finalizedRollout = connectorRolloutHandler.getAndValidateFinalizeRequest(connectorRolloutFinalizeRequestBody) + + verify { + actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion(any(), any()) + } + + assertEquals(state.toString(), finalizedRollout.state.toString()) + assertEquals(rolloutStrategy.toString(), finalizedRollout.rolloutStrategy.toString()) + assertEquals(errorMsg, finalizedRollout.errorMsg) + assertEquals(failedReason, finalizedRollout.failedReason) + 
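// Aside (not part of the diff): the rollout tests above rely on a couple of MockK idioms that are
// worth calling out: `just Runs` stubs a Unit-returning call, and `verify(exactly = 0)` asserts a
// call never happened. A self-contained sketch using a made-up Pinner interface (hypothetical, not
// an Airbyte type) shows both in isolation.
import io.mockk.Runs
import io.mockk.every
import io.mockk.just
import io.mockk.mockk
import io.mockk.verify
import java.util.UUID

interface Pinner {
  fun migratePins(actorDefinitionId: UUID)
  fun removePins(actorDefinitionId: UUID)
}

fun mockkIdiomsDemo() {
  val pinner = mockk<Pinner>()
  // Unit-returning calls are stubbed with `just Runs` rather than `returns Unit`.
  every { pinner.migratePins(any()) } just Runs
  every { pinner.removePins(any()) } just Runs

  pinner.migratePins(UUID.randomUUID())

  // The migration ran exactly once; the removal never did.
  verify(exactly = 1) { pinner.migratePins(any()) }
  verify(exactly = 0) { pinner.removePins(any()) }
}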
assertTrue( + finalizedRollout.updatedAt >= initialRollout.updatedAt, + "updatedAt from finalizedRollout should be more recent than updatedAt from initialRollout", + ) + } + + @ParameterizedTest + @MethodSource("provideConnectorRolloutStateTerminalNonCanceled") + fun `test finalizeConnectorRollout with retainPinsOnCancellation and not canceled is ignored`(state: ConnectorRolloutStateTerminal) { + val rolloutId = UUID.randomUUID() + val rolloutStrategy = ConnectorRolloutStrategy.MANUAL + val errorMsg = "error" + val failedReason = "failure" + val actorDefinitionId = UUID.randomUUID() + val releaseCandidateVersionId = UUID.randomUUID() + + val initialRollout = + createMockConnectorRollout( + id = rolloutId, + actorDefinitionId = actorDefinitionId, + releaseCandidateVersionId = releaseCandidateVersionId, + ) + + val connectorRolloutFinalizeRequestBody = + createMockConnectorRolloutFinalizeRequestBody( + rolloutId = rolloutId, + state = state, + rolloutStrategy = rolloutStrategy, + errorMsg = errorMsg, + failedReason = failedReason, + retainPinsOnCancellation = true, ) every { connectorRolloutService.getConnectorRollout(any()) } returns initialRollout every { actorDefinitionVersionUpdater.removeReleaseCandidatePinsForVersion(any(), any()) } returns Unit @@ -255,7 +405,7 @@ internal class ConnectorRolloutHandlerTest { val rolloutReads = connectorRolloutHandler.listConnectorRollouts(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) - assertEquals(expectedRollouts.map { connectorRolloutHandler.buildConnectorRolloutRead(it) }, rolloutReads) + assertEquals(expectedRollouts.map { connectorRolloutHandler.buildConnectorRolloutRead(it, false) }, rolloutReads) verify { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) @@ -277,7 +427,7 @@ internal class ConnectorRolloutHandlerTest { val rolloutReads = connectorRolloutHandler.listConnectorRollouts(ACTOR_DEFINITION_ID) - assertEquals(expectedRollouts.map { connectorRolloutHandler.buildConnectorRolloutRead(it) }, rolloutReads) + assertEquals(expectedRollouts.map { connectorRolloutHandler.buildConnectorRolloutRead(it, false) }, rolloutReads) verify { connectorRolloutService.listConnectorRollouts(ACTOR_DEFINITION_ID) @@ -303,7 +453,7 @@ internal class ConnectorRolloutHandlerTest { val rolloutReads = connectorRolloutHandler.listConnectorRollouts() - assertEquals(expectedRollouts.map { connectorRolloutHandler.buildConnectorRolloutRead(it) }, rolloutReads) + assertEquals(expectedRollouts.map { connectorRolloutHandler.buildConnectorRolloutRead(it, false) }, rolloutReads) verify { connectorRolloutService.listConnectorRollouts() @@ -359,6 +509,71 @@ internal class ConnectorRolloutHandlerTest { verify { actorDefinitionService.getActorDefinitionVersion(actorDefinitionId, DOCKER_IMAGE_TAG) } } + @Test + fun `test startConnectorRollout with pin migration`() { + val rolloutId = UUID.randomUUID() + val connectorRollout = createMockConnectorRollout(rolloutId).apply { this.state = state } + + every { connectorRolloutService.getConnectorRollout(rolloutId) } returns connectorRollout + every { connectorRolloutService.listConnectorRollouts(any()) } returns emptyList() + every { rolloutActorFinder.getActorSelectionInfo(any(), any()) } returns ActorSelectionInfo(listOf(), 0, 0, 0, 0, 0) + every { rolloutActorFinder.getSyncInfoForPinnedActors(any()) } returns emptyMap() + every { actorDefinitionService.getActorDefinitionVersion(any()) } returns createMockActorDefinitionVersion() + every { actorDefinitionVersionUpdater.migrateReleaseCandidatePins(any(), 
any(), any(), any()) } just Runs + every { connectorRolloutService.writeConnectorRollout(any()) } returns connectorRollout + + connectorRolloutHandler.startConnectorRollout( + ConnectorRolloutStartRequestBody() + .id(rolloutId) + .workflowRunId(UUID.randomUUID().toString()) + .rolloutStrategy(ConnectorRolloutStrategy.MANUAL) + .migratePins(true), + ) + + verify { + connectorRolloutService.getConnectorRollout(rolloutId) + actorDefinitionVersionUpdater.migrateReleaseCandidatePins(any(), any(), any(), any()) + rolloutActorFinder.getActorSelectionInfo(any(), any()) + rolloutActorFinder.getSyncInfoForPinnedActors(any()) + actorDefinitionService.getActorDefinitionVersion(any()) + connectorRolloutService.listConnectorRollouts(any()) + connectorRolloutService.writeConnectorRollout(any()) + } + } + + @Test + fun `test startConnectorRollout without pin migration`() { + val rolloutId = UUID.randomUUID() + val connectorRollout = createMockConnectorRollout(rolloutId).apply { this.state = state } + + every { connectorRolloutService.getConnectorRollout(rolloutId) } returns connectorRollout + every { connectorRolloutService.listConnectorRollouts(any()) } returns emptyList() + every { rolloutActorFinder.getActorSelectionInfo(any(), any()) } returns ActorSelectionInfo(listOf(), 0, 0, 0, 0, 0) + every { rolloutActorFinder.getSyncInfoForPinnedActors(any()) } returns emptyMap() + every { actorDefinitionService.getActorDefinitionVersion(any()) } returns createMockActorDefinitionVersion() + every { connectorRolloutService.writeConnectorRollout(any()) } returns connectorRollout + + connectorRolloutHandler.startConnectorRollout( + ConnectorRolloutStartRequestBody() + .id(rolloutId) + .workflowRunId(UUID.randomUUID().toString()) + .rolloutStrategy(ConnectorRolloutStrategy.MANUAL) + .migratePins(false), + ) + + verify(exactly = 0) { + actorDefinitionVersionUpdater.migrateReleaseCandidatePins(any(), any(), any(), any()) + connectorRolloutService.listConnectorRollouts(any()) + } + verify { + connectorRolloutService.getConnectorRollout(rolloutId) + rolloutActorFinder.getActorSelectionInfo(any(), any()) + rolloutActorFinder.getSyncInfoForPinnedActors(any()) + actorDefinitionService.getActorDefinitionVersion(any()) + connectorRolloutService.writeConnectorRollout(any()) + } + } + @ParameterizedTest @MethodSource("validStartStates") fun `test getAndValidateStartRequest with initialized state`(state: ConnectorEnumRolloutState) { @@ -384,8 +599,10 @@ internal class ConnectorRolloutHandlerTest { @ParameterizedTest @MethodSource("invalidStartStates") - fun `test getAndValidateStartRequest with invalid state throws exception`(state: ConnectorEnumRolloutState) { + fun `test getAndValidateStartRequest adds new information to existing rollout`(state: ConnectorEnumRolloutState) { val rolloutId = UUID.randomUUID() + val runId = UUID.randomUUID().toString() + val rolloutStrategy = ConnectorRolloutStrategy.MANUAL val connectorRollout = createMockConnectorRollout(rolloutId).apply { this.state = state @@ -393,10 +610,16 @@ internal class ConnectorRolloutHandlerTest { every { connectorRolloutService.getConnectorRollout(any()) } returns connectorRollout - assertThrows { - connectorRolloutHandler.getAndValidateStartRequest(createMockConnectorRolloutStartRequestBody()) - } + val result = + connectorRolloutHandler.getAndValidateStartRequest( + ConnectorRolloutStartRequestBody() + .id(rolloutId) + .workflowRunId(runId) + .rolloutStrategy(rolloutStrategy), + ) + assertEquals(connectorRollout.workflowRunId, result.workflowRunId) + 
assertEquals(rolloutStrategy.toString(), result.rolloutStrategy.toString()) verify { connectorRolloutService.getConnectorRollout(any()) } @@ -598,6 +821,7 @@ internal class ConnectorRolloutHandlerTest { ConnectorRolloutStrategy.MANUAL, "No error", "No failure", + true, ), ) @@ -628,6 +852,7 @@ internal class ConnectorRolloutHandlerTest { ConnectorRolloutStrategy.MANUAL, "No error", "No failure", + true, ), ) } @@ -706,6 +931,7 @@ internal class ConnectorRolloutHandlerTest { actorDefinitionId = ACTOR_DEFINITION_ID updatedBy = UPDATED_BY rolloutStrategy = ConnectorRolloutStrategy.MANUAL + migratePins = false } val connectorRollout = createMockConnectorRollout(rolloutId) @@ -719,6 +945,8 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(any()) } returns Optional.of(createMockActorDefinitionVersion()) + every { rolloutActorFinder.getActorSelectionInfo(any(), any()) } returns ActorSelectionInfo(listOf(), 0, 0, 0, 0, 0) + every { rolloutActorFinder.getSyncInfoForPinnedActors(any()) } returns emptyMap() val result = connectorRolloutHandler.manualStartConnectorRollout(connectorRolloutWorkflowStart) @@ -730,6 +958,8 @@ internal class ConnectorRolloutHandlerTest { actorDefinitionService.getActorDefinitionVersion(any()) actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(any()) + rolloutActorFinder.getActorSelectionInfo(any(), any()) + rolloutActorFinder.getSyncInfoForPinnedActors(any()) } } @@ -787,6 +1017,8 @@ internal class ConnectorRolloutHandlerTest { every { connectorRolloutClient.doRollout(any()) } returns ConnectorRolloutOutput(state = ConnectorEnumRolloutState.IN_PROGRESS) every { connectorRolloutService.getConnectorRollout(rolloutId) } returns connectorRollout every { actorDefinitionService.getActorDefinitionVersion(any()) } returns createMockActorDefinitionVersion() + every { rolloutActorFinder.getActorSelectionInfo(any(), any()) } returns ActorSelectionInfo(listOf(), 0, 0, 0, 0, 0) + every { rolloutActorFinder.getSyncInfoForPinnedActors(any()) } returns emptyMap() val result = connectorRolloutHandler.manualDoConnectorRolloutUpdate(connectorRolloutWorkflowUpdate) @@ -796,6 +1028,8 @@ internal class ConnectorRolloutHandlerTest { connectorRolloutClient.doRollout(any()) connectorRolloutService.getConnectorRollout(rolloutId) actorDefinitionService.getActorDefinitionVersion(any()) + rolloutActorFinder.getActorSelectionInfo(any(), any()) + rolloutActorFinder.getSyncInfoForPinnedActors(any()) } } @@ -819,6 +1053,48 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(any()) } returns createMockActorDefinitionVersion() if (initialState == ConnectorEnumRolloutState.INITIALIZED) { every { connectorRolloutClient.startRollout(any()) } returns ConnectorRolloutOutput(state = ConnectorEnumRolloutState.WORKFLOW_STARTED) + every { rolloutActorFinder.getActorSelectionInfo(any(), any()) } returns ActorSelectionInfo(listOf(), 0, 0, 0, 0, 0) + every { rolloutActorFinder.getSyncInfoForPinnedActors(any()) } returns emptyMap() + } + every { connectorRolloutClient.finalizeRollout(any()) } returns Unit + + connectorRolloutHandler.manualFinalizeConnectorRollout(connectorRolloutFinalizeWorkflowUpdate) + + verifyAll { + connectorRolloutService.getConnectorRollout(any()) + actorDefinitionService.getActorDefinitionVersion(any()) + if (initialState == ConnectorEnumRolloutState.INITIALIZED) { + 
connectorRolloutClient.startRollout(any()) + rolloutActorFinder.getActorSelectionInfo(any(), any()) + rolloutActorFinder.getSyncInfoForPinnedActors(any()) + } + connectorRolloutClient.finalizeRollout(any()) + } + } + + @ParameterizedTest + @MethodSource("validFinalizeStates") + fun `test manualFinalizeConnectorRollout with pin retention`(initialState: ConnectorEnumRolloutState) { + val rolloutId = UUID.randomUUID() + val rollout = createMockConnectorRollout(rolloutId) + rollout.apply { state = initialState } + val state = ConnectorRolloutStateTerminal.SUCCEEDED + val connectorRolloutFinalizeWorkflowUpdate = + ConnectorRolloutManualFinalizeRequestBody().apply { + dockerRepository = "airbyte/source-faker" + dockerImageTag = "0.1" + actorDefinitionId = UUID.randomUUID() + id = rolloutId + this.state = state + retainPinsOnCancellation = true + } + + every { connectorRolloutService.getConnectorRollout(any()) } returns rollout + every { actorDefinitionService.getActorDefinitionVersion(any()) } returns createMockActorDefinitionVersion() + if (initialState == ConnectorEnumRolloutState.INITIALIZED) { + every { connectorRolloutClient.startRollout(any()) } returns ConnectorRolloutOutput(state = ConnectorEnumRolloutState.WORKFLOW_STARTED) + every { rolloutActorFinder.getActorSelectionInfo(any(), any()) } returns ActorSelectionInfo(listOf(), 0, 0, 0, 0, 0) + every { rolloutActorFinder.getSyncInfoForPinnedActors(any()) } returns emptyMap() } every { connectorRolloutClient.finalizeRollout(any()) } returns Unit @@ -829,6 +1105,8 @@ internal class ConnectorRolloutHandlerTest { actorDefinitionService.getActorDefinitionVersion(any()) if (initialState == ConnectorEnumRolloutState.INITIALIZED) { connectorRolloutClient.startRollout(any()) + rolloutActorFinder.getActorSelectionInfo(any(), any()) + rolloutActorFinder.getSyncInfoForPinnedActors(any()) } connectorRolloutClient.finalizeRollout(any()) } @@ -859,6 +1137,8 @@ internal class ConnectorRolloutHandlerTest { dockerImageTag, UPDATED_BY, ConnectorRolloutStrategy.MANUAL, + null, + null, ) assertEquals(connectorRollout.id, result.id) @@ -894,6 +1174,8 @@ internal class ConnectorRolloutHandlerTest { dockerImageTag, UPDATED_BY, ConnectorRolloutStrategy.MANUAL, + null, + null, ) } } @@ -919,6 +1201,8 @@ internal class ConnectorRolloutHandlerTest { DOCKER_IMAGE_TAG, UPDATED_BY, ConnectorRolloutStrategy.MANUAL, + null, + null, ) verifyAll { @@ -940,6 +1224,8 @@ internal class ConnectorRolloutHandlerTest { DOCKER_IMAGE_TAG, UPDATED_BY, ConnectorRolloutStrategy.MANUAL, + null, + null, ) } @@ -960,6 +1246,8 @@ internal class ConnectorRolloutHandlerTest { DOCKER_IMAGE_TAG, UPDATED_BY, ConnectorRolloutStrategy.MANUAL, + null, + null, ) } @@ -1013,6 +1301,7 @@ internal class ConnectorRolloutHandlerTest { rolloutStrategy: ConnectorRolloutStrategy, errorMsg: String, failedReason: String, + retainPinsOnCancellation: Boolean, ): ConnectorRolloutFinalizeRequestBody = ConnectorRolloutFinalizeRequestBody() .id(rolloutId) @@ -1020,6 +1309,7 @@ internal class ConnectorRolloutHandlerTest { .rolloutStrategy(rolloutStrategy) .errorMsg(errorMsg) .failedReason(failedReason) + .retainPinsOnCancellation(retainPinsOnCancellation) private fun createMockActorDefinitionVersion(): ActorDefinitionVersion = ActorDefinitionVersion() diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/DiagnosticToolHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/DiagnosticToolHandlerTest.kt new file mode 100644 index 
00000000000..0af80cffb22 --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/DiagnosticToolHandlerTest.kt @@ -0,0 +1,253 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.commons.server.handlers + +import io.airbyte.api.model.generated.LicenseInfoResponse +import io.airbyte.api.model.generated.LicenseStatus +import io.airbyte.commons.csp.CheckResult +import io.airbyte.commons.csp.CspChecker +import io.airbyte.commons.csp.Storage +import io.airbyte.commons.storage.StorageType +import io.airbyte.config.ActorDefinitionVersion +import io.airbyte.config.DestinationConnection +import io.airbyte.config.SourceConnection +import io.airbyte.config.StandardSync +import io.airbyte.config.StandardWorkspace +import io.airbyte.config.persistence.ActorDefinitionVersionHelper +import io.airbyte.config.persistence.ActorDefinitionVersionHelper.ActorDefinitionVersionWithOverrideStatus +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.DestinationService +import io.airbyte.data.services.SourceService +import io.airbyte.data.services.WorkspaceService +import io.fabric8.kubernetes.api.model.Container +import io.fabric8.kubernetes.api.model.Node +import io.fabric8.kubernetes.api.model.NodeCondition +import io.fabric8.kubernetes.api.model.NodeList +import io.fabric8.kubernetes.api.model.NodeStatus +import io.fabric8.kubernetes.api.model.ObjectMeta +import io.fabric8.kubernetes.api.model.Pod +import io.fabric8.kubernetes.api.model.PodList +import io.fabric8.kubernetes.api.model.PodSpec +import io.fabric8.kubernetes.api.model.PodStatus +import io.fabric8.kubernetes.api.model.Quantity +import io.fabric8.kubernetes.api.model.ResourceRequirements +import io.fabric8.kubernetes.client.KubernetesClient +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Test +import java.io.File +import java.io.FileInputStream +import java.time.OffsetDateTime +import java.util.UUID +import java.util.zip.ZipEntry +import java.util.zip.ZipInputStream + +/** + * DiagnosticToolHandlerTest. + */ +internal class DiagnosticToolHandlerTest { + @Test + fun testGenerateDiagnosticReport() { + val diagnosticToolHandler = mockDiagnosticToolHandler() + val zipFile: File? = diagnosticToolHandler.generateDiagnosticReport() + Assertions.assertTrue(zipFile!!.exists()) + FileInputStream(zipFile).use { fis -> + ZipInputStream(fis).use { zis -> + var entry: ZipEntry? 
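// Aside (not part of the diff): the buffer/StringBuilder loop in the new Kotlin test can be written
// more compactly with stdlib helpers. This is a hypothetical simplification, not code from the PR:
// on a ZipInputStream, readBytes() stops at the end of the current entry, so it returns one entry's
// content at a time.
import java.io.File
import java.util.zip.ZipInputStream

fun readZipEntries(zipFile: File): Map<String, String> =
  ZipInputStream(zipFile.inputStream()).use { zis ->
    generateSequence { zis.nextEntry }
      .associate { entry -> entry.name to zis.readBytes().toString(Charsets.UTF_8) }
  }
// Usage against the generated report (constant names mirror the ones the test already uses):
//   val entries = readZipEntries(zipFile)
//   check(entries.getValue(AIRBYTE_INSTANCE_YAML).contains("workspaces"))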
+ var foundInstanceYaml: Boolean = false + var foundDeploymentYaml: Boolean = false + var foundCspChecksYaml: Boolean = false + + // Iterate through the entries in the zip + while ((zis.nextEntry.also { entry = it }) != null) { + if (entry!!.name == AIRBYTE_INSTANCE_YAML) { + foundInstanceYaml = true + + // Check the content of airbyte_instance.yaml + val buffer: ByteArray = ByteArray(1024) + var bytesRead: Int + val content: StringBuilder = StringBuilder() + while ((zis.read(buffer).also { bytesRead = it }) != -1) { + content.append(String(buffer, 0, bytesRead)) + } + // workspace information + Assertions.assertTrue(content.toString().contains("workspaces")) + Assertions.assertTrue(content.toString().contains("connections")) + Assertions.assertTrue(content.toString().contains("connectors")) + // license information + Assertions.assertTrue(content.toString().contains("license")) + Assertions.assertTrue(content.toString().contains("expiryDate")) + Assertions.assertTrue(content.toString().contains("usedNodes")) + } else if (entry!!.name == AIRBYTE_DEPLOYMENT_YAML) { + foundDeploymentYaml = true + + // Check the content of airbyte_deployment.yaml + val buffer: ByteArray = ByteArray(1024) + var bytesRead: Int + val content: StringBuilder = StringBuilder() + while ((zis.read(buffer).also { bytesRead = it }) != -1) { + content.append(String(buffer, 0, bytesRead)) + } + // k8s information + Assertions.assertTrue(content.toString().contains("k8s")) + Assertions.assertTrue(content.toString().contains("nodes")) + Assertions.assertTrue(content.toString().contains("pods")) + } else if (entry!!.name == AIRBYTE_CSP_CHECKS) { + foundCspChecksYaml = true + } + } + + // Ensure all yaml files are present in the zip + Assertions.assertTrue(foundInstanceYaml) + Assertions.assertTrue(foundDeploymentYaml) + Assertions.assertTrue(foundCspChecksYaml) + } + } + } +} + +private fun mockDiagnosticToolHandler(): DiagnosticToolHandler { + val workspaceService: WorkspaceService = + mockk { + every { listStandardWorkspaces(false) } returns listOf(standardWorkspace) + } + + val connectionService: ConnectionService = + mockk { + every { listWorkspaceStandardSyncs(standardWorkspace.workspaceId, false) } returns listOf(standardSync) + } + + val sourceService: SourceService = + mockk { + every { listWorkspaceSourceConnection(any()) } returns listOf(sourceConnection) + every { isSourceActive(any()) } returns true + every { getSourceDefinitionFromSource(sourceConnection.sourceDefinitionId) } returns mockk() + } + + val destinationService: DestinationService = + mockk { + every { listWorkspaceDestinationConnection(any()) } returns listOf(destinationConnection) + every { isDestinationActive(any()) } returns true + every { getStandardDestinationDefinition(destinationConnection.destinationDefinitionId) } returns mockk() + } + + val actorDefinitionVersionHelper: ActorDefinitionVersionHelper = + mockk { + every { getSourceVersionWithOverrideStatus(any(), any(), any()) } returns actorDefinitionVersion + every { getDestinationVersionWithOverrideStatus(any(), any(), any()) } returns actorDefinitionVersion + } + + val cspChecker: CspChecker = + mockk { + every { check() } returns CheckResult(Storage(StorageType.LOCAL, emptyList())) + } + + val instanceConfigurationHandler: InstanceConfigurationHandler = + mockk { + every { licenseInfo() } returns + LicenseInfoResponse() + .edition( + "pro", + ).licenseStatus(LicenseStatus.PRO) + .expirationDate(OffsetDateTime.now().plusDays(10).toEpochSecond()) + .usedNodes(2) + } + + val 
kubernetesClient: KubernetesClient = mockk {} + + val diagnosticToolHandler = + DiagnosticToolHandler( + workspaceService, + connectionService, + sourceService, + destinationService, + actorDefinitionVersionHelper, + instanceConfigurationHandler, + kubernetesClient, + cspChecker, + ) + + val node = + Node().apply { + metadata = ObjectMeta().apply { name = "node1" } + status = + NodeStatus().apply { + conditions = + listOf( + NodeCondition().apply { + type = "Ready" + status = "true" + }, + ) + allocatable = mapOf("cpu" to Quantity("500m"), "memory" to Quantity("1Gi")) + } + } + + every { kubernetesClient.nodes() } returns + mockk { + every { list() } returns NodeList().apply { items = listOf(node) } + } + + val container = + Container().apply { + name = "containerName" + resources = + ResourceRequirements().apply { + limits = mapOf("cpu" to Quantity("500m"), "memory" to Quantity("1Gi")) + } + } + + val pod = + Pod().apply { + metadata = ObjectMeta().apply { name = "pod1" } + status = PodStatus().apply { phase = "Running" } + spec = PodSpec().apply { containers = listOf(container) } + } + + val podList = PodList().apply { items = listOf(pod) } + + every { kubernetesClient.pods() } returns + mockk { + every { inNamespace("ab") } returns + mockk { + every { list() } returns podList + } + } + + return diagnosticToolHandler +} + +private val standardWorkspace: StandardWorkspace = + StandardWorkspace() + .withName("workspace1") + .withWorkspaceId(UUID.randomUUID()) + +private val standardSync: StandardSync = + StandardSync() + .withName("connection1") + .withStatus(StandardSync.Status.ACTIVE) + .withConnectionId(UUID.randomUUID()) + .withSourceId(UUID.randomUUID()) + .withDestinationId(UUID.randomUUID()) + +private val sourceConnection: SourceConnection = + SourceConnection() + .withSourceId(UUID.randomUUID()) + .withName("source") + .withSourceDefinitionId(UUID.randomUUID()) + +private val actorDefinitionVersion: ActorDefinitionVersionWithOverrideStatus = + ActorDefinitionVersionWithOverrideStatus( + ActorDefinitionVersion() + .withDockerImageTag("tag") + .withSupportState(ActorDefinitionVersion.SupportState.SUPPORTED), + true, + ) + +private val destinationConnection: DestinationConnection = + DestinationConnection() + .withDestinationId(UUID.randomUUID()) + .withName("destination1") + .withDestinationDefinitionId(UUID.randomUUID()) diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerTest.kt index 3f4f5594cb1..c2b1d8cd2a8 100644 --- a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerTest.kt +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ResourceBootstrapHandlerTest.kt @@ -3,18 +3,14 @@ package io.airbyte.commons.server.handlers import io.airbyte.commons.server.authorization.ApiAuthorizationHelper import io.airbyte.commons.server.support.CurrentUserService import io.airbyte.config.AuthenticatedUser -import io.airbyte.config.Organization import io.airbyte.config.OrganizationPaymentConfig import io.airbyte.config.Permission import io.airbyte.data.services.OrganizationPaymentConfigService import io.airbyte.data.services.OrganizationService import io.airbyte.data.services.PermissionService import io.airbyte.data.services.WorkspaceService -import io.airbyte.featureflag.BillingInArrearsForNewSignups import io.airbyte.featureflag.FeatureFlagClient import 
io.kotest.assertions.asClue -import io.kotest.matchers.booleans.shouldBeFalse -import io.kotest.matchers.booleans.shouldBeTrue import io.kotest.matchers.shouldBe import io.mockk.every import io.mockk.mockk @@ -65,32 +61,12 @@ class ResourceBootstrapHandlerTest { } @Test - fun `creates organization without billing in arrears`() { + fun `creates organization with organization payment config`() { val spy = spyk(handler) - val createdOrgSlot = slot() - - every { spy.findExistingOrganization(any()) } returns null - every { featureFlagClient.boolVariation(eq(BillingInArrearsForNewSignups), any()) } returns false - every { organizationService.writeOrganization(capture(createdOrgSlot)) } returns Unit - - spy.findOrCreateOrganizationAndPermission(user) - - verify { organizationService.writeOrganization(any()) } - - createdOrgSlot.captured.asClue { - it.orgLevelBilling.shouldBeFalse() - } - } - - @Test - fun `creates organization with billing in arrears`() { - val spy = spyk(handler) - val createdOrgSlot = slot() val paymentConfigSlot = slot() every { spy.findExistingOrganization(any()) } returns null - every { featureFlagClient.boolVariation(eq(BillingInArrearsForNewSignups), any()) } returns true - every { organizationService.writeOrganization(capture(createdOrgSlot)) } returns Unit + every { organizationService.writeOrganization(any()) } returns Unit every { organizationPaymentConfigService.savePaymentConfig(capture(paymentConfigSlot)) } returns Unit spy.findOrCreateOrganizationAndPermission(user) @@ -98,10 +74,6 @@ class ResourceBootstrapHandlerTest { verify { organizationService.writeOrganization(any()) } verify { organizationPaymentConfigService.savePaymentConfig(any()) } - createdOrgSlot.captured.asClue { - it.orgLevelBilling.shouldBeTrue() - } - paymentConfigSlot.captured.asClue { it.paymentStatus shouldBe OrganizationPaymentConfig.PaymentStatus.UNINITIALIZED it.organizationId shouldBe orgId diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/helpers/MapperSecretHelperTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/helpers/MapperSecretHelperTest.kt new file mode 100644 index 00000000000..ecea15f30e0 --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/helpers/MapperSecretHelperTest.kt @@ -0,0 +1,307 @@ +package io.airbyte.commons.server.handlers.helpers + +import com.fasterxml.jackson.databind.node.ObjectNode +import io.airbyte.api.problems.throwable.generated.MapperSecretNotFoundProblem +import io.airbyte.commons.constants.AirbyteSecretConstants +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.server.helpers.ConnectionHelpers +import io.airbyte.config.AirbyteSecret +import io.airbyte.config.Configs.DeploymentMode +import io.airbyte.config.ConfiguredAirbyteCatalog +import io.airbyte.config.MapperConfig +import io.airbyte.config.ScopeType +import io.airbyte.config.SecretPersistenceConfig +import io.airbyte.config.mapper.configs.AesEncryptionConfig +import io.airbyte.config.mapper.configs.AesMode +import io.airbyte.config.mapper.configs.AesPadding +import io.airbyte.config.mapper.configs.EncryptionConfig +import io.airbyte.config.mapper.configs.EncryptionMapperConfig +import io.airbyte.config.mapper.configs.HashingConfig +import io.airbyte.config.mapper.configs.HashingMapperConfig +import io.airbyte.config.mapper.configs.HashingMethods +import io.airbyte.config.secrets.JsonSecretsProcessor +import io.airbyte.config.secrets.SecretsRepositoryReader +import 
io.airbyte.config.secrets.SecretsRepositoryWriter +import io.airbyte.data.services.SecretPersistenceConfigService +import io.airbyte.data.services.WorkspaceService +import io.airbyte.featureflag.Organization +import io.airbyte.featureflag.TestClient +import io.airbyte.featureflag.UseRuntimeSecretPersistence +import io.airbyte.mappers.transformations.EncryptionMapper +import io.airbyte.mappers.transformations.HashingMapper +import io.airbyte.mappers.transformations.Mapper +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import java.util.Optional +import java.util.UUID + +internal class MapperSecretHelperTest { + companion object { + private const val SECRET_VALUE = "my_secret_value" + private const val SECRET_COORDINATE = "airbyte_coordinate" + private val WORKSPACE_ID = UUID.randomUUID() + private val ORGANIZATION_ID = UUID.randomUUID() + } + + private val workspaceService = mockk() + private val secretPersistenceConfigService = mockk() + private val secretsRepositoryWriter = mockk() + private val secretsRepositoryReader = mockk() + private val secretsProcessor = mockk() + private val featureFlagClient = mockk() + + private val hashingMapper = HashingMapper() + private val encryptionMapper = EncryptionMapper() + + private val mapperSecretHelper = + MapperSecretHelper( + mappers = listOf(encryptionMapper as Mapper, hashingMapper as Mapper), + workspaceService = workspaceService, + secretPersistenceConfigService = secretPersistenceConfigService, + secretsRepositoryWriter = secretsRepositoryWriter, + secretsRepositoryReader = secretsRepositoryReader, + featureFlagClient = featureFlagClient, + secretsProcessor = secretsProcessor, + deploymentMode = DeploymentMode.CLOUD, + ) + + @BeforeEach + fun setup() { + every { workspaceService.getOrganizationIdFromWorkspaceId(WORKSPACE_ID) } returns Optional.of(ORGANIZATION_ID) + every { featureFlagClient.boolVariation(UseRuntimeSecretPersistence, Organization(ORGANIZATION_ID)) } returns true + every { secretPersistenceConfigService.get(ScopeType.ORGANIZATION, ORGANIZATION_ID) } returns mockk() + } + + @Test + fun `test create mapper secrets`() { + val mapperConfig = + EncryptionMapperConfig( + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "target", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Hydrated(SECRET_VALUE), + ), + ) + val catalogWithSecrets = generateCatalogWithMapper(mapperConfig) + + val configSpec = encryptionMapper.spec().jsonSchema().get("properties").get("config") + val configNoSecrets = + Jsons.jsonNode( + mapOf( + "algorithm" to "AES", + "targetField" to "target", + "mode" to "CBC", + "padding" to "NoPadding", + "key" to mapOf("_secret" to SECRET_COORDINATE), + ), + ) + + every { + secretsRepositoryWriter.createFromConfig(eq(WORKSPACE_ID), eq(Jsons.jsonNode(mapperConfig.config())), eq(configSpec), any()) + } returns configNoSecrets + + val catalogWithoutSecrets = mapperSecretHelper.createAndReplaceMapperSecrets(WORKSPACE_ID, catalogWithSecrets) + + val expectedConfig = + mapperConfig.copy( + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "target", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Reference(SECRET_COORDINATE), + ), + ) + + assertEquals(expectedConfig, 
catalogWithoutSecrets.streams.first().mappers.first()) + + verify { secretsRepositoryWriter.createFromConfig(eq(WORKSPACE_ID), eq(Jsons.jsonNode(mapperConfig.config())), eq(configSpec), any()) } + } + + @Test + fun `test without secrets in spec does not try to use secret persistence`() { + val mapperConfig = + HashingMapperConfig(config = HashingConfig(targetField = "target", method = HashingMethods.SHA256, fieldNameSuffix = "_hashed")) + val catalog = generateCatalogWithMapper(mapperConfig) + + val resultingCatalog = mapperSecretHelper.createAndReplaceMapperSecrets(WORKSPACE_ID, catalog) + assertEquals(mapperConfig, resultingCatalog.streams.first().mappers.first()) + + verify(exactly = 0) { + secretPersistenceConfigService.get(any(), any()) + secretsRepositoryWriter.createFromConfig(any(), any(), any(), any()) + } + } + + @Test + fun `test mask secrets for output`() { + val mapperConfig = + EncryptionMapperConfig( + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "target", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Reference(SECRET_COORDINATE), + ), + ) + + val configSpec = encryptionMapper.spec().jsonSchema().get("properties").get("config") + val maskedConfig = + Jsons.jsonNode( + mapOf( + "algorithm" to "AES", + "targetField" to "target", + "mode" to "CBC", + "padding" to "NoPadding", + "key" to AirbyteSecretConstants.SECRETS_MASK, + ), + ) + + every { secretsProcessor.prepareSecretsForOutput(Jsons.jsonNode(mapperConfig.config()), configSpec) } returns maskedConfig + + val catalog = generateCatalogWithMapper(mapperConfig) + val maskedCatalog = mapperSecretHelper.maskMapperSecrets(catalog) + + val expectedConfig = + mapperConfig.copy( + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "target", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Hydrated(AirbyteSecretConstants.SECRETS_MASK), + ), + ) + + assertEquals(expectedConfig, maskedCatalog.streams.first().mappers.first()) + + verify { secretsProcessor.prepareSecretsForOutput(Jsons.jsonNode(mapperConfig.config()), configSpec) } + } + + @Test + fun `test updating mapper config with masked secrets`() { + val mapperId = UUID.randomUUID() + val maskedUpdatedMapperConfig = + EncryptionMapperConfig( + id = mapperId, + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "some_other_target", + fieldNameSuffix = "_enc", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Hydrated(AirbyteSecretConstants.SECRETS_MASK), + ), + ) + val maskedUpdatedConfigJson = Jsons.jsonNode(maskedUpdatedMapperConfig.config()) as ObjectNode + val catalogWithMaskedSecrets = generateCatalogWithMapper(maskedUpdatedMapperConfig) + + val persistedMapperConfig = + EncryptionMapperConfig( + id = mapperId, + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "target", + fieldNameSuffix = "_enc", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Reference(SECRET_COORDINATE), + ), + ) + val persistedCatalog = generateCatalogWithMapper(persistedMapperConfig) + + val persistedConfigJson = Jsons.jsonNode(persistedMapperConfig.config()) as ObjectNode + val hydratedPersistedConfigJson = persistedConfigJson.deepCopy().put("key", SECRET_VALUE) + + every { secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence(eq(persistedConfigJson), any()) } returns hydratedPersistedConfigJson + + val configSpec = 
encryptionMapper.spec().jsonSchema().get("properties").get("config") + val resolvedUpdatedConfigJson = maskedUpdatedConfigJson.deepCopy().put("key", SECRET_VALUE) + + every { secretsProcessor.copySecrets(hydratedPersistedConfigJson, maskedUpdatedConfigJson, configSpec) } returns resolvedUpdatedConfigJson + + val expectedMapperConfig = + maskedUpdatedMapperConfig.copy( + config = (maskedUpdatedMapperConfig.config() as AesEncryptionConfig).copy(key = AirbyteSecret.Reference(SECRET_COORDINATE)), + ) + + every { + secretsRepositoryWriter.updateFromConfig(eq(WORKSPACE_ID), eq(persistedConfigJson), eq(resolvedUpdatedConfigJson), eq(configSpec), any()) + } returns Jsons.jsonNode(expectedMapperConfig.config()) + val res = mapperSecretHelper.updateAndReplaceMapperSecrets(WORKSPACE_ID, persistedCatalog, catalogWithMaskedSecrets) + assertEquals(expectedMapperConfig, res.streams.first().mappers.first()) + + verify { + secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence(eq(persistedConfigJson), any()) + secretsProcessor.copySecrets(hydratedPersistedConfigJson, maskedUpdatedConfigJson, configSpec) + secretsRepositoryWriter.updateFromConfig(eq(WORKSPACE_ID), eq(persistedConfigJson), eq(resolvedUpdatedConfigJson), eq(configSpec), any()) + } + } + + @Test + fun `test updating mapper config with masked secrets and missing previous secret is not supported`() { + val maskedUpdatedMapperConfig = + EncryptionMapperConfig( + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "some_other_target", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Hydrated(AirbyteSecretConstants.SECRETS_MASK), + ), + ) + val catalogWithMaskedSecrets = generateCatalogWithMapper(maskedUpdatedMapperConfig) + + val referencedMapperConfig = + EncryptionMapperConfig( + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "target", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Reference(SECRET_COORDINATE), + ), + ) + val persistedCatalog = generateCatalogWithMapper(referencedMapperConfig) + + val maskedConfig = + Jsons.jsonNode( + mapOf( + "algorithm" to "AES", + "targetField" to "target", + "mode" to "CBC", + "padding" to "NoPadding", + "key" to AirbyteSecretConstants.SECRETS_MASK, + ), + ) + + val configSpec = encryptionMapper.spec().jsonSchema().get("properties").get("config") + every { secretsProcessor.prepareSecretsForOutput(Jsons.jsonNode(referencedMapperConfig.config()), configSpec) } returns maskedConfig + + assertThrows { + mapperSecretHelper.updateAndReplaceMapperSecrets(WORKSPACE_ID, persistedCatalog, catalogWithMaskedSecrets) + } + } + + private fun generateCatalogWithMapper(mapperConfig: MapperConfig): ConfiguredAirbyteCatalog { + val catalog = ConnectionHelpers.generateBasicConfiguredAirbyteCatalog() + catalog.streams.first().mappers = listOf(mapperConfig) + return catalog + } +} diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/helpers/CronExpressionHelperTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/helpers/CronExpressionHelperTest.kt new file mode 100644 index 00000000000..047c78d3715 --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/helpers/CronExpressionHelperTest.kt @@ -0,0 +1,69 @@ +package io.airbyte.server.handlers + +import com.cronutils.model.CronType +import com.cronutils.model.definition.CronDefinitionBuilder +import com.cronutils.parser.CronParser +import 
io.airbyte.commons.server.helpers.CronExpressionHelper +import junit.framework.TestCase.assertEquals +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import org.junit.jupiter.api.assertThrows +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.ValueSource + +const val EVERY_HOUR = "0 0 * * * ?" +const val EVERY_MINUTE = "0 * * * * ?" +const val EVERY_SECOND = "* * * * * ?" +const val EVERY_HALF_MINUTE = "*/2 * * * * ?" +const val SOMETIMES_EVERY_SECOND = "59,0 0 12,18 * * ?" +const val Y2K = "0 0 0 1 1 ? 2000" + +class CronExpressionHelperTest { + private var cronExpressionHelper: CronExpressionHelper = CronExpressionHelper() + private val cronDefinition = CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ) + + @Test + fun testNextExecutionsEveryHour() { + val everyHour = cronExpressionHelper.validateCronExpression(EVERY_HOUR) + val result = cronExpressionHelper.getNextExecutions(everyHour, 2) + + assertEquals(2, result.size) + assertEquals(result[1] - result[0], 3600) + } + + @ParameterizedTest + @ValueSource(strings = [EVERY_SECOND, SOMETIMES_EVERY_SECOND, EVERY_HALF_MINUTE]) + fun testCheckDoesNotExecuteMoreThanOncePerMinuteThrows(cronExpression: String) { + val cron = CronParser(cronDefinition).parse(cronExpression) + assertThrows { + cronExpressionHelper.checkDoesNotExecuteMoreThanOncePerMinute(cron) + } + } + + @ParameterizedTest + @ValueSource(strings = [EVERY_HOUR, Y2K, EVERY_MINUTE]) + fun testCheckDoesNotExecuteMoreThanOncePerMinutePasses(cronExpression: String) { + val cron = CronParser(cronDefinition).parse(cronExpression) + assertDoesNotThrow { + cronExpressionHelper.checkDoesNotExecuteMoreThanOncePerMinute(cron) + } + } + + @ParameterizedTest + @ValueSource(strings = [EVERY_SECOND, SOMETIMES_EVERY_SECOND, EVERY_HALF_MINUTE]) + fun testCheckDoesNotExecuteMoreThanOncePerHourThrows(cronExpression: String) { + val cron = CronParser(cronDefinition).parse(cronExpression) + assertThrows { + cronExpressionHelper.checkDoesNotExecuteMoreThanOncePerHour(cron) + } + } + + @ParameterizedTest + @ValueSource(strings = [EVERY_HOUR]) + fun testCheckDoesNotExecuteMoreThanOncePerHourPasses(cronExpression: String) { + val cron = CronParser(cronDefinition).parse(cronExpression) + assertDoesNotThrow { + cronExpressionHelper.checkDoesNotExecuteMoreThanOncePerHour(cron) + } + } +} diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/services/ConnectionServiceTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/services/ConnectionServiceTest.kt new file mode 100644 index 00000000000..3bf0ed637b8 --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/services/ConnectionServiceTest.kt @@ -0,0 +1,432 @@ +package io.airbyte.commons.server.services + +import io.airbyte.api.model.generated.ConnectionStatus +import io.airbyte.commons.server.ConnectionId +import io.airbyte.commons.server.handlers.helpers.ConnectionTimelineEventHelper +import io.airbyte.commons.server.scheduler.EventRunner +import io.airbyte.config.Job +import io.airbyte.config.JobStatus +import io.airbyte.config.StandardSync +import io.airbyte.data.services.JobService +import io.airbyte.data.services.shared.ConnectionAutoDisabledReason +import io.airbyte.persistence.job.JobNotifier +import io.airbyte.persistence.job.JobPersistence +import io.kotest.matchers.booleans.shouldBeFalse +import io.kotest.matchers.booleans.shouldBeTrue +import io.mockk.Runs +import io.mockk.every +import 
io.mockk.just +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test +import java.time.Duration +import java.time.Instant +import java.util.Optional +import java.util.UUID +import io.airbyte.data.services.ConnectionService as ConnectionRepository + +class ConnectionServiceTest { + private val connectionRepository: ConnectionRepository = mockk() + private val connectionTimelineEventHelper: ConnectionTimelineEventHelper = mockk() + private val warnOrDisableHelper: WarnOrDisableConnectionHelper = mockk() + private val eventRunner: EventRunner = mockk() + + private val service = + ConnectionServiceImpl( + connectionRepository, + connectionTimelineEventHelper, + warnOrDisableHelper, + eventRunner, + ) + + private val connectionId1 = ConnectionId(UUID.randomUUID()) + private val connectionId2 = ConnectionId(UUID.randomUUID()) + private val connectionId3 = ConnectionId(UUID.randomUUID()) + + @Nested + inner class DisableConnections { + @Test + fun `should call connectionRepository to disable connections`() { + every { connectionRepository.disableConnectionsById(any()) } returns setOf(connectionId1.value, connectionId2.value, connectionId3.value) + every { connectionTimelineEventHelper.logStatusChangedEventInConnectionTimeline(any(), any(), any(), any()) } returns mockk() + every { eventRunner.update(any()) } returns mockk() + + service.disableConnections(setOf(connectionId1, connectionId2, connectionId3), null) + + verify { connectionRepository.disableConnectionsById(listOf(connectionId1.value, connectionId2.value, connectionId3.value)) } + } + + @Test + fun `should add connection timeline events for disabled connections`() { + every { connectionRepository.disableConnectionsById(any()) } returns setOf(connectionId2.value, connectionId3.value) // just 2 and 3 + every { connectionTimelineEventHelper.logStatusChangedEventInConnectionTimeline(any(), any(), any(), any()) } returns mockk() + every { eventRunner.update(any()) } returns mockk() + + service.disableConnections(setOf(connectionId1, connectionId2, connectionId3), ConnectionAutoDisabledReason.INVALID_PAYMENT_METHOD) + + // connectionId1 was already disabled according to the connectionRepository, so no event should be added + verify(exactly = 0) { + connectionTimelineEventHelper.logStatusChangedEventInConnectionTimeline( + connectionId1.value, + any(), + any(), + any(), + ) + } + verify { + connectionTimelineEventHelper.logStatusChangedEventInConnectionTimeline( + connectionId2.value, + ConnectionStatus.INACTIVE, + ConnectionAutoDisabledReason.INVALID_PAYMENT_METHOD.name, + true, + ) + } + verify { + connectionTimelineEventHelper.logStatusChangedEventInConnectionTimeline( + connectionId3.value, + ConnectionStatus.INACTIVE, + ConnectionAutoDisabledReason.INVALID_PAYMENT_METHOD.name, + true, + ) + } + } + + @Test + fun `should call event runner to update disabled connections`() { + every { connectionRepository.disableConnectionsById(any()) } returns setOf(connectionId2.value, connectionId3.value) // just 2 and 3 + every { connectionTimelineEventHelper.logStatusChangedEventInConnectionTimeline(any(), any(), any(), any()) } returns mockk() + every { eventRunner.update(any()) } returns mockk() + + service.disableConnections(setOf(connectionId1, connectionId2, connectionId3), null) + + // connectionId1 was already disabled according to the connectionRepository, so the event runner should not be called for it + verify(exactly = 0) { + eventRunner.update(connectionId1.value) + } + verify 
{ + eventRunner.update(connectionId2.value) + } + verify { + eventRunner.update(connectionId3.value) + } + } + } + + @Nested + inner class WarnOrDisableForConsecutiveFailures { + @Test + fun `should call warnOrDisableHelper`() { + val timestamp = mockk() + every { warnOrDisableHelper.warnOrDisable(any(), any(), any()) } returns true + + service.warnOrDisableForConsecutiveFailures(connectionId1, timestamp) + + verify { warnOrDisableHelper.warnOrDisable(service, connectionId1, timestamp) } + } + } + + @Nested + inner class WarnOrDisableConnectionHelperTest { + private val connectionService: ConnectionService = mockk() + private val connectionRepository: ConnectionRepository = mockk() + private val jobService: JobService = mockk() + private val jobPersistence: JobPersistence = mockk() + private val jobNotifier: JobNotifier = mockk() + + private val maxDaysBeforeDisable = 30 + private val maxJobsBeforeDisable = 5 + private val maxDaysBeforeWarning = 15 + private val maxJobsBeforeWarning = 3 + private val connectionId = UUID.randomUUID() + private val connectionIdWrapped = ConnectionId(connectionId) + private val timestamp = Instant.now() + private val jobId1 = 1L + private val jobId2 = 2L + private val jobId3 = 3L + + private val helper = + WarnOrDisableConnectionHelper( + connectionRepository, + jobService, + jobPersistence, + jobNotifier, + maxDaysBeforeDisable, + maxJobsBeforeDisable, + maxDaysBeforeWarning, + maxJobsBeforeWarning, + ) + + private fun mockJob( + id: Long, + status: JobStatus, + createdAt: Long = Instant.now().epochSecond, + ): Job = + mockk().also { + every { it.id } returns id + every { it.status } returns status + every { it.createdAtInSecond } returns createdAt + every { it.attempts } returns emptyList() + } + + @Nested + inner class AutoDisableConnection { + @Test + fun `should return false if no replication jobs found`() { + every { jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.empty() + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.empty() + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeFalse() + + verify(exactly = 0) { connectionRepository.getStandardSync(any()) } + verify(exactly = 0) { connectionRepository.writeStandardSync(any()) } + verify(exactly = 0) { jobNotifier.autoDisableConnectionWarning(any(), any()) } + verify(exactly = 0) { jobNotifier.autoDisableConnection(any(), any()) } + } + + @Test + fun `should return false if most recent job is not failed`() { + val firstJob = mockJob(id = jobId1, status = JobStatus.SUCCEEDED) + val lastJob = mockJob(id = jobId2, status = JobStatus.SUCCEEDED) + + every { jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.of(firstJob) + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.of(lastJob) + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeFalse() + + verify(exactly = 0) { connectionRepository.getStandardSync(any()) } + verify(exactly = 0) { connectionRepository.writeStandardSync(any()) } + verify(exactly = 0) { jobNotifier.autoDisableConnectionWarning(any(), any()) } + verify(exactly = 0) { jobNotifier.autoDisableConnection(any(), any()) } + } + + @Test + fun `should disable connection if disable thresholds are met`() { + val failedJob = + mockJob( + id = jobId1, + status = JobStatus.FAILED, + createdAt = timestamp.minus(Duration.ofDays(31)).epochSecond, + ) + val sync = + mockk(relaxed = true).also { + every { it.status } returns 
StandardSync.Status.ACTIVE + every { it.connectionId } returns connectionId + } + + every { jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.of(failedJob) + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.of(failedJob) + every { jobService.getPriorJobWithStatusForScopeAndJobId(connectionId.toString(), jobId1, JobStatus.FAILED) } returns null + every { jobService.countFailedJobsSinceLastSuccessForScope(connectionId.toString()) } returns maxJobsBeforeDisable + every { jobService.lastSuccessfulJobForScope(connectionId.toString()) } returns null + every { connectionRepository.getStandardSync(connectionId) } returns sync + every { connectionRepository.writeStandardSync(sync) } just Runs + every { jobNotifier.autoDisableConnection(any(), any()) } just Runs + every { jobPersistence.getAttemptStats(any(), any()) } returns mockk() + every { connectionService.disableConnections(any(), any()) } returns mockk() + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeTrue() + + verify { + connectionService.disableConnections( + setOf(connectionIdWrapped), + ConnectionAutoDisabledReason.TOO_MANY_FAILED_JOBS_WITH_NO_RECENT_SUCCESS, + ) + } + verify { jobNotifier.autoDisableConnection(failedJob, any()) } + } + + @Test + fun `should send warning if warning thresholds are met`() { + val priorFailedJob = + mockJob( + id = jobId1, + status = JobStatus.FAILED, + createdAt = timestamp.minus(Duration.ofDays(20)).epochSecond, + ) + val failedJob = + mockJob( + id = jobId2, + status = JobStatus.FAILED, + createdAt = timestamp.epochSecond, + ) + val sync = + mockk(relaxed = true).also { + every { it.status } returns StandardSync.Status.ACTIVE + } + + every { jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.of(priorFailedJob) + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.of(failedJob) + every { jobService.getPriorJobWithStatusForScopeAndJobId(connectionId.toString(), jobId2, JobStatus.FAILED) } returns priorFailedJob + every { jobService.countFailedJobsSinceLastSuccessForScope(connectionId.toString()) } returns maxJobsBeforeWarning + every { jobService.lastSuccessfulJobForScope(connectionId.toString()) } returns null + every { connectionRepository.getStandardSync(connectionId) } returns sync + every { jobNotifier.autoDisableConnectionWarning(any(), any()) } just Runs + every { jobPersistence.getAttemptStats(any(), any()) } returns mockk() + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeFalse() + + verify { jobNotifier.autoDisableConnectionWarning(failedJob, any()) } + } + + @Test + fun `should not send a warning if days threshold is not met`() { + val priorFailedJob = + mockJob( + id = jobId1, + status = JobStatus.FAILED, + // not long enough ago + createdAt = timestamp.minus(Duration.ofDays(14)).epochSecond, + ) + val failedJob = + mockJob( + id = jobId2, + status = JobStatus.FAILED, + createdAt = timestamp.epochSecond, + ) + val sync = + mockk(relaxed = true).also { + every { it.status } returns StandardSync.Status.ACTIVE + } + + // meets failure count warning threshold + every { jobService.countFailedJobsSinceLastSuccessForScope(connectionId.toString()) } returns maxJobsBeforeWarning + + every { jobService.getPriorJobWithStatusForScopeAndJobId(connectionId.toString(), jobId2, JobStatus.FAILED) } returns priorFailedJob + every { jobService.lastSuccessfulJobForScope(connectionId.toString()) } returns null + every { 
jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.of(priorFailedJob) + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.of(failedJob) + every { connectionRepository.getStandardSync(connectionId) } returns sync + every { jobNotifier.autoDisableConnectionWarning(any(), any()) } just Runs + every { jobPersistence.getAttemptStats(any(), any()) } returns mockk() + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeFalse() + + verify(exactly = 0) { jobNotifier.autoDisableConnectionWarning(failedJob, any()) } + } + + @Test + fun `should not disable connection if days threshold is not met`() { + val failedJob = + mockJob( + id = jobId1, + status = JobStatus.FAILED, + // not long enough ago + createdAt = timestamp.minus(Duration.ofDays(29)).epochSecond, + ) + val sync = + mockk(relaxed = true).also { + every { it.status } returns StandardSync.Status.ACTIVE + every { it.connectionId } returns connectionId + } + + // meets failure count disable threshold + every { jobService.countFailedJobsSinceLastSuccessForScope(connectionId.toString()) } returns maxJobsBeforeDisable + + every { jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.of(failedJob) + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.of(failedJob) + every { jobService.getPriorJobWithStatusForScopeAndJobId(connectionId.toString(), jobId1, JobStatus.FAILED) } returns null + every { jobService.lastSuccessfulJobForScope(connectionId.toString()) } returns null + every { connectionRepository.getStandardSync(connectionId) } returns sync + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeFalse() + + verify(exactly = 0) { connectionRepository.writeStandardSync(any()) } + verify(exactly = 0) { jobNotifier.autoDisableConnection(any(), any()) } + } + + @Test + fun `should not disable connection if failure count threshold is not met`() { + val failedJob = + mockJob( + id = jobId1, + status = JobStatus.FAILED, + // meets days threshold + createdAt = timestamp.minus(Duration.ofDays(30)).epochSecond, + ) + val sync = + mockk(relaxed = true).also { + every { it.status } returns StandardSync.Status.ACTIVE + every { it.connectionId } returns connectionId + } + + // does not meet failure count disable threshold + every { jobService.countFailedJobsSinceLastSuccessForScope(connectionId.toString()) } returns maxJobsBeforeDisable - 1 + + every { jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.of(failedJob) + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.of(failedJob) + every { jobService.getPriorJobWithStatusForScopeAndJobId(connectionId.toString(), jobId1, JobStatus.FAILED) } returns null + every { jobService.lastSuccessfulJobForScope(connectionId.toString()) } returns null + every { connectionRepository.getStandardSync(connectionId) } returns sync + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeFalse() + + verify(exactly = 0) { connectionRepository.writeStandardSync(any()) } + verify(exactly = 0) { jobNotifier.autoDisableConnection(any(), any()) } + } + + @Test + fun `should only send warning if no prior warning`() { + val firstJob = + mockJob( + id = jobId1, + status = JobStatus.FAILED, + createdAt = timestamp.minus(Duration.ofDays(20)).epochSecond, + ) + + // 16 days after first job, should have triggered warning + val priorJob = + mockJob( + id = jobId2, + status = JobStatus.FAILED, + createdAt = 
timestamp.minus(Duration.ofDays(4)).epochSecond, + ) + + // 20 days after first failure, but prior job already triggered warning + val mostRecentJob = + mockJob( + id = jobId3, + status = JobStatus.FAILED, + createdAt = timestamp.epochSecond, + ) + + val sync = + mockk(relaxed = true).also { + every { it.status } returns StandardSync.Status.ACTIVE + } + + // the prior job triggered the warning so most recent job is one above the warning threshold + every { jobService.countFailedJobsSinceLastSuccessForScope(connectionId.toString()) } returns maxJobsBeforeWarning + 1 + + every { jobService.getPriorJobWithStatusForScopeAndJobId(connectionId.toString(), jobId3, JobStatus.FAILED) } returns priorJob + every { jobService.lastSuccessfulJobForScope(connectionId.toString()) } returns null + every { jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.of(firstJob) + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.of(mostRecentJob) + every { connectionRepository.getStandardSync(connectionId) } returns sync + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeFalse() + + verify(exactly = 0) { jobNotifier.autoDisableConnectionWarning(any(), any()) } + } + + @Test + fun `should return false if connection is already inactive`() { + val failedJob = mockJob(id = jobId1, status = JobStatus.FAILED) + val sync = + mockk(relaxed = true) { + every { status } returns StandardSync.Status.INACTIVE + } + + every { jobPersistence.getFirstReplicationJob(connectionId) } returns Optional.of(failedJob) + every { jobPersistence.getLastReplicationJob(connectionId) } returns Optional.of(failedJob) + every { connectionRepository.getStandardSync(connectionId) } returns sync + every { jobService.getPriorJobWithStatusForScopeAndJobId(connectionId.toString(), 1L, JobStatus.FAILED) } returns null + every { jobService.lastSuccessfulJobForScope(connectionId.toString()) } returns null + + helper.warnOrDisable(connectionService, connectionIdWrapped, timestamp).shouldBeFalse() + + verify(exactly = 0) { connectionRepository.writeStandardSync(any()) } + } + } + } +} diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/services/OrganizationServiceTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/services/OrganizationServiceTest.kt new file mode 100644 index 00000000000..017fdcd4e1b --- /dev/null +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/services/OrganizationServiceTest.kt @@ -0,0 +1,147 @@ +package io.airbyte.commons.server.services + +import io.airbyte.api.problems.throwable.generated.ResourceNotFoundProblem +import io.airbyte.api.problems.throwable.generated.StateConflictProblem +import io.airbyte.commons.server.ConnectionId +import io.airbyte.commons.server.OrganizationId +import io.airbyte.config.OrganizationPaymentConfig +import io.airbyte.config.OrganizationPaymentConfig.PaymentStatus +import io.airbyte.data.services.shared.ConnectionAutoDisabledReason +import io.kotest.assertions.throwables.shouldThrow +import io.kotest.matchers.shouldBe +import io.mockk.every +import io.mockk.mockk +import io.mockk.slot +import io.mockk.verify +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test +import java.util.UUID +import io.airbyte.data.services.ConnectionService as ConnectionRepository +import io.airbyte.data.services.OrganizationPaymentConfigService as OrganizationPaymentConfigRepository + +class OrganizationServiceTest { + private val connectionService: 
ConnectionService = mockk() + private val connectionRepository: ConnectionRepository = mockk() + private val organizationPaymentConfigRepository: OrganizationPaymentConfigRepository = mockk() + + private val service = + OrganizationServiceImpl( + connectionService, + connectionRepository, + organizationPaymentConfigRepository, + ) + + private val organizationId = OrganizationId(UUID.randomUUID()) + private val connectionId1 = ConnectionId(UUID.randomUUID()) + private val connectionId2 = ConnectionId(UUID.randomUUID()) + private val connectionId3 = ConnectionId(UUID.randomUUID()) + + @Nested + inner class DisableAllConnections { + @Test + fun `should call connectionService to disable connections`() { + every { connectionRepository.listConnectionIdsForOrganization(organizationId.value) } returns + listOf(connectionId1.value, connectionId2.value, connectionId3.value) + every { connectionService.disableConnections(any(), any()) } returns mockk() + + service.disableAllConnections(organizationId, null) + + verify { connectionService.disableConnections(setOf(connectionId1, connectionId2, connectionId3), null) } + } + } + + @Nested + inner class HandlePaymentGracePeriodEnded { + @Test + fun `should throw if orgPaymentConfig is not found`() { + every { organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) } returns null + shouldThrow { service.handlePaymentGracePeriodEnded(organizationId) } + } + + @Test + fun `should throw if organization is not in a grace period`() { + every { organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) } returns + OrganizationPaymentConfig().apply { paymentStatus = PaymentStatus.OKAY } + shouldThrow { service.handlePaymentGracePeriodEnded(organizationId) } + } + + @Test + fun `should update orgPaymentConfig status to disabled`() { + val orgPaymentConfig = OrganizationPaymentConfig().apply { paymentStatus = PaymentStatus.GRACE_PERIOD } + val orgPaymentConfigSlot = slot() + + every { organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) } returns orgPaymentConfig + every { organizationPaymentConfigRepository.savePaymentConfig(capture(orgPaymentConfigSlot)) } returns Unit + every { connectionRepository.listConnectionIdsForOrganization(organizationId.value) } returns emptyList() + every { connectionService.disableConnections(any(), any()) } returns mockk() + + service.handlePaymentGracePeriodEnded(organizationId) + + verify { organizationPaymentConfigRepository.savePaymentConfig(orgPaymentConfig) } + orgPaymentConfigSlot.captured.paymentStatus shouldBe PaymentStatus.DISABLED + } + + @Test + fun `should call disableAllConnections with invalid payment method reason`() { + val orgPaymentConfig = OrganizationPaymentConfig().apply { paymentStatus = PaymentStatus.GRACE_PERIOD } + every { organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) } returns orgPaymentConfig + every { organizationPaymentConfigRepository.savePaymentConfig(orgPaymentConfig) } returns Unit + every { connectionRepository.listConnectionIdsForOrganization(organizationId.value) } returns + listOf(connectionId1.value, connectionId2.value, connectionId3.value) + every { connectionService.disableConnections(any(), any()) } returns mockk() + + service.handlePaymentGracePeriodEnded(organizationId) + + verify { + connectionService.disableConnections( + setOf(connectionId1, connectionId2, connectionId3), + ConnectionAutoDisabledReason.INVALID_PAYMENT_METHOD, + ) + } + } + } + + @Nested + inner class 
HandleUncollectibleInvoice { + @Test + fun `should throw if orgPaymentConfig is not found`() { + every { organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) } returns null + shouldThrow { service.handleUncollectibleInvoice(organizationId) } + } + + @Test + fun `should update orgPaymentConfig status to locked`() { + val orgPaymentConfig = OrganizationPaymentConfig().apply { paymentStatus = PaymentStatus.OKAY } + val orgPaymentConfigSlot = slot() + + every { organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) } returns orgPaymentConfig + every { organizationPaymentConfigRepository.savePaymentConfig(capture(orgPaymentConfigSlot)) } returns Unit + every { connectionRepository.listConnectionIdsForOrganization(organizationId.value) } returns emptyList() + every { connectionService.disableConnections(any(), any()) } returns mockk() + + service.handleUncollectibleInvoice(organizationId) + + verify { organizationPaymentConfigRepository.savePaymentConfig(orgPaymentConfig) } + orgPaymentConfigSlot.captured.paymentStatus shouldBe PaymentStatus.LOCKED + } + + @Test + fun `should call disableAllConnections with uncollectible invoice reason`() { + val orgPaymentConfig = OrganizationPaymentConfig().apply { paymentStatus = PaymentStatus.GRACE_PERIOD } + every { organizationPaymentConfigRepository.findByOrganizationId(organizationId.value) } returns orgPaymentConfig + every { organizationPaymentConfigRepository.savePaymentConfig(orgPaymentConfig) } returns Unit + every { connectionRepository.listConnectionIdsForOrganization(organizationId.value) } returns + listOf(connectionId1.value, connectionId2.value, connectionId3.value) + every { connectionService.disableConnections(any(), any()) } returns mockk() + + service.handleUncollectibleInvoice(organizationId) + + verify { + connectionService.disableConnections( + setOf(connectionId1, connectionId2, connectionId3), + ConnectionAutoDisabledReason.INVOICE_MARKED_UNCOLLECTIBLE, + ) + } + } + } +} diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/LogClient.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/LogClient.kt index 8f8f189b09e..98074157318 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/LogClient.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/LogClient.kt @@ -82,7 +82,7 @@ class LogClient( } fun deleteLogs(logPath: String) { - logger.debug { "Deleting logs from path '$logPath' using ${client.storageType()} storage client..." } + logger.debug { "Deleting logs from path '$logPath' using ${client.storageType} storage client..." } client.delete(id = logPath) logger.debug { "Log delete request complete." } } @@ -91,30 +91,30 @@ class LogClient( logPath: String, numLines: Int, ): LogEvents { - logger.debug { "Tailing $numLines line(s) from logs from path '$logPath' using ${client.storageType()} storage client..." } + logger.debug { "Tailing $numLines line(s) from logs from path '$logPath' using ${client.storageType} storage client..." } val files = client.list(id = logPath).filter { it.endsWith(STRUCTURED_LOG_FILE_EXTENSION) } - logger.debug { "Found ${files.size} files from path '$logPath' using ${client.storageType()} storage client." } + logger.debug { "Found ${files.size} files from path '$logPath' using ${client.storageType} storage client." 
} val instrumentedFiles = meterRegistry.createGauge( metricName = OssMetricsRegistry.LOG_CLIENT_FILES_RETRIEVED.metricName, - logClientType = client.storageType(), + logClientType = client.storageType, stateObject = files, ) val timer = meterRegistry.createTimer( metricName = OssMetricsRegistry.LOG_CLIENT_FILES_RETRIEVAL_TIME_MS.metricName, - logClientType = client.storageType(), + logClientType = client.storageType, ) val lineCounter = meterRegistry.createCounter( metricName = OssMetricsRegistry.LOG_CLIENT_FILE_LINE_COUNT_RETRIEVED.metricName, - logClientType = client.storageType(), + logClientType = client.storageType, ) val byteCounter = meterRegistry.createCounter( metricName = OssMetricsRegistry.LOG_CLIENT_FILE_LINE_BYTES_RETRIEVED.metricName, - logClientType = client.storageType(), + logClientType = client.storageType, ) val events = @@ -132,20 +132,20 @@ class LogClient( logPath: String, numLines: Int, ): List { - logger.debug { "Tailing $numLines line(s) from logs from path '$logPath' using ${client.storageType()} storage client..." } + logger.debug { "Tailing $numLines line(s) from logs from path '$logPath' using ${client.storageType} storage client..." } val files = client.list(id = logPath) - logger.debug { "Found ${files.size} files from path '$logPath' using ${client.storageType()} storage client." } + logger.debug { "Found ${files.size} files from path '$logPath' using ${client.storageType} storage client." } val instrumentedFiles = meterRegistry.createGauge( metricName = OssMetricsRegistry.LOG_CLIENT_FILES_RETRIEVED.metricName, - logClientType = client.storageType(), + logClientType = client.storageType, stateObject = files, ) val timer = meterRegistry.createTimer( metricName = OssMetricsRegistry.LOG_CLIENT_FILES_RETRIEVAL_TIME_MS.metricName, - logClientType = client.storageType(), + logClientType = client.storageType, ) return if (timer != null) { @@ -162,12 +162,12 @@ class LogClient( val lineCounter = meterRegistry.createCounter( metricName = OssMetricsRegistry.LOG_CLIENT_FILE_LINE_COUNT_RETRIEVED.metricName, - logClientType = client.storageType(), + logClientType = client.storageType, ) val byteCounter = meterRegistry.createCounter( metricName = OssMetricsRegistry.LOG_CLIENT_FILE_LINE_BYTES_RETRIEVED.metricName, - logClientType = client.storageType(), + logClientType = client.storageType, ) val isStructured = files.all { it.endsWith(suffix = STRUCTURED_LOG_FILE_EXTENSION) } diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/LogMdcHelper.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/LogMdcHelper.kt index b3a84eb1a13..b5b5b99b397 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/LogMdcHelper.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/LogMdcHelper.kt @@ -14,6 +14,9 @@ const val DEFAULT_LOG_FILENAME = "logs.log" /** The default MDC key that holds the job log path for log storage. */ const val DEFAULT_JOB_LOG_PATH_MDC_KEY = "job_log_path" +/** The default MDC key that holds the audit log path. 
*/ +const val DEFAULT_AUDIT_LOGGING_PATH_MDC_KEY = "audit_logging_path" + /** * Defines methods for setting various MDC key/values related to logging */ diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt index 6f674b9de91..6c0c2a5033b 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt @@ -195,12 +195,14 @@ internal fun buildBucketConfig(storageConfig: Map): StorageBucke state = "", workloadOutput = "", activityPayload = "", + auditLogging = storageConfig[EnvVar.STORAGE_BUCKET_AUDIT_LOGGING] ?: "", ) private fun buildStorageConfig(): Map = mapOf( EnvVar.STORAGE_TYPE to EnvVar.STORAGE_TYPE.fetchNotNull(), EnvVar.STORAGE_BUCKET_LOG to EnvVar.STORAGE_BUCKET_LOG.fetchNotNull(), + EnvVar.STORAGE_BUCKET_AUDIT_LOGGING to EnvVar.STORAGE_BUCKET_AUDIT_LOGGING.fetchNotNull(), EnvVar.AZURE_STORAGE_CONNECTION_STRING to EnvVar.AZURE_STORAGE_CONNECTION_STRING.fetchNotNull(), EnvVar.GOOGLE_APPLICATION_CREDENTIALS to EnvVar.GOOGLE_APPLICATION_CREDENTIALS.fetchNotNull(), EnvVar.AWS_ACCESS_KEY_ID to EnvVar.AWS_ACCESS_KEY_ID.fetchNotNull(), diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt index 300d6882a86..e72073c8db1 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt @@ -26,6 +26,7 @@ import ch.qos.logback.core.spi.FilterReply import ch.qos.logback.core.util.Duration import ch.qos.logback.core.util.StatusPrinter2 import io.airbyte.commons.envvar.EnvVar +import io.airbyte.commons.logging.DEFAULT_AUDIT_LOGGING_PATH_MDC_KEY import io.airbyte.commons.logging.DEFAULT_JOB_LOG_PATH_MDC_KEY import io.airbyte.commons.storage.DocumentType import org.slf4j.Logger.ROOT_LOGGER_NAME @@ -49,6 +50,7 @@ class AirbyteLogbackCustomConfigurer : listOf( createPlatformAppender(loggerContext = loggerContext), createOperationsJobAppender(loggerContext = loggerContext), + createAuditLogAppender(loggerContext = loggerContext), ) // Register appenders with root logger @@ -87,6 +89,30 @@ class AirbyteLogbackCustomConfigurer : ) } + /** + * Builds the appender for audit log messages. This appender logs all messages to remote storage. + * + * @param loggerContext The logging context. + * @return The operations audit log appender. + */ + private fun createAuditLogAppender(loggerContext: LoggerContext): Appender { + val appenderFactory = { context: Context, discriminatorValue: String -> + createCloudAppender( + context = context, + discriminatorValue = discriminatorValue, + documentType = DocumentType.AUDIT_LOGS, + appenderName = AUDIT_LOGGER_NAME, + ) + } + + return createSiftingAppender( + appenderFactory = appenderFactory, + appenderName = AUDIT_LOGGER_NAME, + contextKey = DEFAULT_AUDIT_LOGGING_PATH_MDC_KEY, + loggerContext = loggerContext, + ) + } + /** * Builds an [AirbyteCloudStorageAppender] for remote logging of log messages. 
* diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt index 47d5f84c510..1a3d19bdd03 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt @@ -34,3 +34,4 @@ const val CALLER_THREAD_NAME_PATTERN = "CALLER_THREAD_NAME" const val CLOUD_OPERATIONS_JOB_LOGGER_NAME = "airbyte-cloud-operations-job-logger" const val PLATFORM_LOGGER_NAME = "airbyte-platform-logger" +const val AUDIT_LOGGER_NAME = "airbyte-audit-logger" diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt index 87219e561e0..842a734ba37 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt @@ -97,12 +97,22 @@ enum class DocumentType( STATE(prefix = Path.of("/state")), WORKLOAD_OUTPUT(prefix = Path.of("/workload/output")), ACTIVITY_PAYLOADS(prefix = Path.of("/activity-payloads")), + AUDIT_LOGS(prefix = Path.of("audit-logging")), } /** * Interface for writing, reading, and deleting documents. */ interface StorageClient { + /** @property documentType the [DocumentType] of this [StorageClient] */ + val documentType: DocumentType + + /** @property storageType the [StorageType] of this [StorageClient] */ + val storageType: StorageType + + /** @property bucketName the name of the bucket used by this [StorageClient] */ + val bucketName: String + /** * Lists the documents stored at the given id. * @@ -139,27 +149,13 @@ interface StorageClient { */ fun delete(id: String): Boolean - /** - * The [DocumentType] supported by this client. - * - * @return the associated [DocumentType] of the client. - */ - fun documentType(): DocumentType - - /** - * The client storage type. - * - * @return the associated [StorageType] of the client - */ - fun storageType(): StorageType - /** * Generates a file ID. 
* * @param id a relative file path * @return the file ID including any configured storage prefix */ - fun key(id: String): String = prependIfMissing(prefix = documentType().prefix.toString(), id = id) + fun key(id: String): String = prependIfMissing(prefix = documentType.prefix.toString(), id = id) } /** @@ -175,7 +171,9 @@ class AzureStorageClient( private val type: DocumentType, private val azureClient: BlobServiceClient, ) : StorageClient { - private val bucketName = config.bucketName(type) + override val storageType = StorageType.AZURE + override val documentType = type + override val bucketName = config.bucketName(type) @Inject constructor( @@ -223,10 +221,6 @@ class AzureStorageClient( .getBlobClient(key(id)) .deleteIfExists() - override fun documentType(): DocumentType = type - - override fun storageType(): StorageType = StorageType.AZURE - private fun createBucketIfNotExists() { val blobContainerClient = azureClient.getBlobContainerClient(bucketName) if (!blobContainerClient.exists()) { @@ -248,7 +242,9 @@ class GcsStorageClient( private val type: DocumentType, private val gcsClient: Storage, ) : StorageClient { - private val bucketName = config.bucketName(type) + override val storageType = StorageType.GCS + override val documentType = type + override val bucketName = config.bucketName(type) @Inject constructor( @@ -287,10 +283,6 @@ class GcsStorageClient( override fun delete(id: String): Boolean = gcsClient.delete(BlobId.of(bucketName, key(id))) - override fun documentType(): DocumentType = type - - override fun storageType(): StorageType = StorageType.GCS - @VisibleForTesting internal fun blobId(id: String): BlobId = BlobId.of(bucketName, key(id)) @@ -312,6 +304,10 @@ class LocalStorageClient( private val config: LocalStorageConfig, @Parameter private val type: DocumentType, ) : StorageClient { + override val storageType = StorageType.LOCAL + override val documentType = type + override val bucketName = config.bucketName(type) + override fun list(id: String): List { val res = toPath(id) @@ -341,15 +337,11 @@ class LocalStorageClient( toPath(id) .deleteIfExists() - override fun documentType(): DocumentType = type - - override fun storageType(): StorageType = StorageType.LOCAL - /** Converts an ID [String] to an absolute [Path]. */ internal fun toPath(id: String): Path = Path.of(config.root, type.prefix.toString(), id) /** Converts an absolute [Path] to an ID [String]. 
*/ - internal fun toId(abspath: Path): String = abspath.relativeTo(Path.of(config.root, type.prefix.toString())).pathString + private fun toId(abspath: Path): String = abspath.relativeTo(Path.of(config.root, type.prefix.toString())).pathString } /** @@ -365,13 +357,13 @@ class MinioStorageClient( type: DocumentType, s3Client: S3Client = config.s3Client(), ) : AbstractS3StorageClient(config = config, type = type, s3Client = s3Client) { + override val storageType = StorageType.MINIO + @Inject constructor( config: MinioStorageConfig, @Parameter type: DocumentType, ) : this(config = config, type = type, s3Client = config.s3Client()) - - override fun storageType(): StorageType = StorageType.MINIO } /** @@ -393,7 +385,7 @@ class S3StorageClient( @Parameter type: DocumentType, ) : this(config = config, type = type, s3Client = config.s3Client()) - override fun storageType(): StorageType = StorageType.S3 + override val storageType = StorageType.S3 } /** @@ -410,7 +402,8 @@ abstract class AbstractS3StorageClient internal constructor( private val type: DocumentType, private val s3Client: S3Client, ) : StorageClient { - private val bucketName = config.bucketName(type) + override val documentType: DocumentType = type + override val bucketName = config.bucketName(type) init { runCatching { createBucketIfNotExists() } @@ -485,8 +478,6 @@ abstract class AbstractS3StorageClient internal constructor( return exists } - override fun documentType(): DocumentType = type - private fun createBucketIfNotExists() { if (!doesBucketExist(bucketName=bucketName)) { val createBucketRequest = CreateBucketRequest.builder().bucket(bucketName).build() @@ -574,4 +565,5 @@ fun StorageConfig.bucketName(type: DocumentType): String = DocumentType.WORKLOAD_OUTPUT -> this.buckets.workloadOutput DocumentType.LOGS -> this.buckets.log DocumentType.ACTIVITY_PAYLOADS -> this.buckets.activityPayload + DocumentType.AUDIT_LOGS -> this.buckets.auditLogging?.takeIf { it.isNotBlank() } ?: "" } diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageConfig.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageConfig.kt index fd8be4d5d2c..f64c71fa170 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageConfig.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageConfig.kt @@ -31,6 +31,7 @@ data class StorageBucketConfig( @Value("\${$STORAGE_BUCKET_STATE}") val state: String, @Value("\${$STORAGE_BUCKET_WORKLOAD_OUTPUT}") val workloadOutput: String, @Value("\${$STORAGE_BUCKET_ACTIVITY_PAYLOAD}") val activityPayload: String, + @Value("\${$STORAGE_BUCKET_AUDIT_LOGGING:}") val auditLogging: String?, ) /** @@ -50,6 +51,9 @@ data class AzureStorageConfig( put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) + buckets.auditLogging?.let { + put(EnvVar.STORAGE_BUCKET_AUDIT_LOGGING, it) + } put(EnvVar.STORAGE_TYPE, StorageType.AZURE.name) put(EnvVar.AZURE_STORAGE_CONNECTION_STRING, connectionString) }.mapKeys { it.key.name } @@ -74,6 +78,9 @@ data class GcsStorageConfig( put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) + buckets.auditLogging?.let { + put(EnvVar.STORAGE_BUCKET_AUDIT_LOGGING, it) + } put(EnvVar.STORAGE_TYPE, StorageType.GCS.name) 
put(EnvVar.GOOGLE_APPLICATION_CREDENTIALS, applicationCredentials) }.mapKeys { it.key.name } @@ -102,6 +109,9 @@ data class S3StorageConfig( put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) + buckets.auditLogging?.let { + put(EnvVar.STORAGE_BUCKET_AUDIT_LOGGING, it) + } put(EnvVar.STORAGE_TYPE, StorageType.S3.name) accessKey?.let { put(EnvVar.AWS_ACCESS_KEY_ID, accessKey) @@ -136,6 +146,9 @@ data class MinioStorageConfig( put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) + buckets.auditLogging?.let { + put(EnvVar.STORAGE_BUCKET_AUDIT_LOGGING, it) + } put(EnvVar.STORAGE_TYPE, StorageType.MINIO.name) put(EnvVar.AWS_ACCESS_KEY_ID, accessKey) put(EnvVar.AWS_SECRET_ACCESS_KEY, secretAccessKey) @@ -162,6 +175,9 @@ class LocalStorageConfig( put(EnvVar.STORAGE_BUCKET_STATE, buckets.state) put(EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, buckets.workloadOutput) put(EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD, buckets.activityPayload) + buckets.auditLogging?.let { + put(EnvVar.STORAGE_BUCKET_AUDIT_LOGGING, it) + } put(EnvVar.STORAGE_TYPE, StorageType.LOCAL.name) }.mapKeys { it.key.name } } diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageConstants.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageConstants.kt index 5f90cb5df54..d7cd20bda6e 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageConstants.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageConstants.kt @@ -34,6 +34,12 @@ const val STORAGE_BUCKET_WORKLOAD_OUTPUT = "${STORAGE_BUCKET}.workload-output" /** Specific setting for the activity bucket. */ const val STORAGE_BUCKET_ACTIVITY_PAYLOAD = "${STORAGE_BUCKET}.activity-payload" +/** Audit logging. */ +const val AUDIT_LOGGING = "audit-logging" + +/** Specific setting for the audit log bucket. */ +const val STORAGE_BUCKET_AUDIT_LOGGING = "${STORAGE_BUCKET}.${AUDIT_LOGGING}" + /** The setting that contains what storage type the client represents. 
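An illustrative aside, not part of the patch: a short sketch of how the optional audit-logging bucket resolves through the bucketName(...) branch and the STORAGE_BUCKET_AUDIT_LOGGING property added above. The bucket and root values are made up; the types and fallback behaviour mirror the diff.

import io.airbyte.commons.storage.DocumentType
import io.airbyte.commons.storage.LocalStorageConfig
import io.airbyte.commons.storage.StorageBucketConfig
import io.airbyte.commons.storage.bucketName

fun main() {
  val buckets =
    StorageBucketConfig(
      log = "log",
      state = "state",
      workloadOutput = "workload-output",
      activityPayload = "activity-payload",
      auditLogging = null, // opt-in; stays null unless the new property is bound
    )
  val config = LocalStorageConfig(buckets = buckets, root = "/tmp/airbyte")
  println(config.bucketName(DocumentType.AUDIT_LOGS)) // "" when audit logging is not configured
  println(config.toEnvVarMap().containsKey("STORAGE_BUCKET_AUDIT_LOGGING")) // false; omitted when null
}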
*/ const val STORAGE_TYPE = "$STORAGE_ROOT.type" diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/LogClientTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/LogClientTest.kt index cb75beead3f..31294219a84 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/LogClientTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/LogClientTest.kt @@ -91,7 +91,7 @@ internal class LogClientTest { mockk { every { list(any()) } returns listOf(logPath) every { read(any()) } returns logFile.toFile().readText() - every { storageType() } returns StorageType.LOCAL + every { storageType } returns StorageType.LOCAL } val storageClientFactory = mockk { @@ -134,7 +134,7 @@ internal class LogClientTest { mockk { every { list(any()) } returns listOf(logPath) every { read(any()) } returns logFile.toFile().readText() - every { storageType() } returns StorageType.LOCAL + every { storageType } returns StorageType.LOCAL } val storageClientFactory = mockk { @@ -171,7 +171,7 @@ internal class LogClientTest { mockk { every { list(any()) } returns listOf(logPath) every { read(any()) } returns logFile.toFile().readText() - every { storageType() } returns StorageType.LOCAL + every { storageType } returns StorageType.LOCAL } val storageClientFactory = mockk { @@ -238,7 +238,7 @@ internal class LogClientTest { every { read("file2") } returns fileContents2 every { read("file3") } returns fileContents3 every { read("file4") } returns fileContents4 - every { storageType() } returns storageType + every { this@mockk.storageType } returns storageType } val storageClientFactory = mockk { @@ -286,7 +286,7 @@ internal class LogClientTest { every { read("file2$STRUCTURED_LOG_FILE_EXTENSION") } returns objectMapper.writeValueAsString(logEvents2) every { read("file3$STRUCTURED_LOG_FILE_EXTENSION") } returns objectMapper.writeValueAsString(logEvents3) every { read("file4$STRUCTURED_LOG_FILE_EXTENSION") } returns objectMapper.writeValueAsString(logEvents4) - every { storageType() } returns storageType + every { this@mockk.storageType } returns storageType } val storageClientFactory = mockk { @@ -394,7 +394,7 @@ internal class LogClientTest { mockk { every { list(any()) } returns listOf(logPath) every { read(any()) } returns logFile.toFile().readText() - every { storageType() } returns StorageType.LOCAL + every { storageType } returns StorageType.LOCAL } val storageClientFactory = mockk { @@ -606,7 +606,7 @@ internal class LogClientTest { mockk { every { list(any()) } returns listOf(logPath) every { read(any()) } returns logFile.toFile().readText() - every { storageType() } returns StorageType.LOCAL + every { storageType } returns StorageType.LOCAL } val storageClientFactory = mockk { diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt index c602748fee7..567775c7e8b 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt @@ -50,6 +50,7 @@ private class AirbyteCloudStorageAppenderTest { assertEquals("", bucketConfig.state) assertEquals("", bucketConfig.workloadOutput) assertEquals("", bucketConfig.activityPayload) + assertEquals("", bucketConfig.auditLogging) } @Test diff 
--git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/GcsStorageConfigTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/GcsStorageConfigTest.kt index 99ead14ddc3..f8c19d4405b 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/GcsStorageConfigTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/GcsStorageConfigTest.kt @@ -18,6 +18,8 @@ internal class GcsStorageConfigTest { workloadOutput = "workload-output", log = "log", activityPayload = "activity-payload", + // Audit logging is null by default as it is SME feature only + auditLogging = null, ) val applicationCredentials = MoreResources.readResource("sample_gcs_credentials.json") val gcsStorageConfig = @@ -35,6 +37,33 @@ internal class GcsStorageConfigTest { assertEquals(applicationCredentials, envVarMap[EnvVar.GOOGLE_APPLICATION_CREDENTIALS.name]) } + @Test + internal fun testToEnvVarMapWithAuditLogging() { + val bucketConfig = + StorageBucketConfig( + state = "state", + workloadOutput = "workload-output", + log = "log", + activityPayload = "activity-payload", + auditLogging = "audit-logging", + ) + val applicationCredentials = MoreResources.readResource("sample_gcs_credentials.json") + val gcsStorageConfig = + GcsStorageConfig( + buckets = bucketConfig, + applicationCredentials = applicationCredentials, + ) + val envVarMap = gcsStorageConfig.toEnvVarMap() + assertEquals(7, envVarMap.size) + assertEquals(bucketConfig.log, envVarMap[EnvVar.STORAGE_BUCKET_LOG.name]) + assertEquals(bucketConfig.workloadOutput, envVarMap[EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT.name]) + assertEquals(bucketConfig.activityPayload, envVarMap[EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD.name]) + assertEquals(bucketConfig.state, envVarMap[EnvVar.STORAGE_BUCKET_STATE.name]) + assertEquals(bucketConfig.auditLogging, envVarMap[EnvVar.STORAGE_BUCKET_AUDIT_LOGGING.name]) + assertEquals(StorageType.GCS.name, envVarMap[EnvVar.STORAGE_TYPE.name]) + assertEquals(applicationCredentials, envVarMap[EnvVar.GOOGLE_APPLICATION_CREDENTIALS.name]) + } + @Test internal fun testToString() { val bucketConfig = @@ -43,6 +72,7 @@ internal class GcsStorageConfigTest { workloadOutput = "workload-output", log = "log", activityPayload = "activity-payload", + auditLogging = "audit-logging", ) val applicationCredentials = MoreResources.readResource("sample_gcs_credentials.json") val gcsStorageConfig = diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/MinioStorageConfigTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/MinioStorageConfigTest.kt index c1baa4140f8..9463874b8c4 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/MinioStorageConfigTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/MinioStorageConfigTest.kt @@ -20,6 +20,8 @@ internal class MinioStorageConfigTest { workloadOutput = "workload-output", log = "log", activityPayload = "activity-payload", + // Audit logging is null by default as it is SME feature only + auditLogging = null, ) val s3StorageConfig = MinioStorageConfig( @@ -40,6 +42,39 @@ internal class MinioStorageConfigTest { assertEquals(endpoint, envVarMap[EnvVar.MINIO_ENDPOINT.name]) } + @Test + internal fun testToEnvVarMapWithAuditLogging() { + val accessKey = "access-key" + val secretAccessKey = "secret-access-key" + val endpoint = "http://localhost:8080" + val bucketConfig = + StorageBucketConfig( + state = "state", + workloadOutput = "workload-output", + log 
= "log", + activityPayload = "activity-payload", + auditLogging = "audit-logging", + ) + val s3StorageConfig = + MinioStorageConfig( + buckets = bucketConfig, + accessKey = accessKey, + secretAccessKey = secretAccessKey, + endpoint = endpoint, + ) + val envVarMap = s3StorageConfig.toEnvVarMap() + assertEquals(9, envVarMap.size) + assertEquals(bucketConfig.log, envVarMap[EnvVar.STORAGE_BUCKET_LOG.name]) + assertEquals(bucketConfig.workloadOutput, envVarMap[EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT.name]) + assertEquals(bucketConfig.activityPayload, envVarMap[EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD.name]) + assertEquals(bucketConfig.state, envVarMap[EnvVar.STORAGE_BUCKET_STATE.name]) + assertEquals(bucketConfig.auditLogging, envVarMap[EnvVar.STORAGE_BUCKET_AUDIT_LOGGING.name]) + assertEquals(StorageType.MINIO.name, envVarMap[EnvVar.STORAGE_TYPE.name]) + assertEquals(accessKey, envVarMap[EnvVar.AWS_ACCESS_KEY_ID.name]) + assertEquals(secretAccessKey, envVarMap[EnvVar.AWS_SECRET_ACCESS_KEY.name]) + assertEquals(endpoint, envVarMap[EnvVar.MINIO_ENDPOINT.name]) + } + @Test internal fun testToString() { val accessKey = "access-key" @@ -51,6 +86,7 @@ internal class MinioStorageConfigTest { workloadOutput = "workload-output", log = "log", activityPayload = "activity-payload", + auditLogging = "audit-logging", ) val s3StorageConfig = MinioStorageConfig( diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/S3StorageConfigTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/S3StorageConfigTest.kt index 46f50965605..651db853879 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/S3StorageConfigTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/S3StorageConfigTest.kt @@ -22,6 +22,8 @@ internal class S3StorageConfigTest { workloadOutput = "workload-output", log = "log", activityPayload = "activity-payload", + // Audit logging is null by default as it is SME feature only + auditLogging = null, ) val s3StorageConfig = S3StorageConfig( @@ -42,6 +44,39 @@ internal class S3StorageConfigTest { assertEquals(region, envVarMap[EnvVar.AWS_DEFAULT_REGION.name]) } + @Test + internal fun testToEnvVarMapWithAuditLogging() { + val accessKey = "access-key" + val secretAccessKey = "secret-access-key" + val region = Region.US_EAST_1.toString() + val bucketConfig = + StorageBucketConfig( + state = "state", + workloadOutput = "workload-output", + log = "log", + activityPayload = "activity-payload", + auditLogging = "audit-logging", + ) + val s3StorageConfig = + S3StorageConfig( + buckets = bucketConfig, + accessKey = accessKey, + secretAccessKey = secretAccessKey, + region = region, + ) + val envVarMap = s3StorageConfig.toEnvVarMap() + assertEquals(9, envVarMap.size) + assertEquals(bucketConfig.log, envVarMap[EnvVar.STORAGE_BUCKET_LOG.name]) + assertEquals(bucketConfig.workloadOutput, envVarMap[EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT.name]) + assertEquals(bucketConfig.activityPayload, envVarMap[EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD.name]) + assertEquals(bucketConfig.state, envVarMap[EnvVar.STORAGE_BUCKET_STATE.name]) + assertEquals(bucketConfig.auditLogging, envVarMap[EnvVar.STORAGE_BUCKET_AUDIT_LOGGING.name]) + assertEquals(StorageType.S3.name, envVarMap[EnvVar.STORAGE_TYPE.name]) + assertEquals(accessKey, envVarMap[EnvVar.AWS_ACCESS_KEY_ID.name]) + assertEquals(secretAccessKey, envVarMap[EnvVar.AWS_SECRET_ACCESS_KEY.name]) + assertEquals(region, envVarMap[EnvVar.AWS_DEFAULT_REGION.name]) + } + @Test internal fun 
testToEnvVarMapBlankCredentials() { val region = Region.US_EAST_1.toString() @@ -51,6 +86,7 @@ internal class S3StorageConfigTest { workloadOutput = "workload-output", log = "log", activityPayload = "activity-payload", + auditLogging = "audit-logging", ) val s3StorageConfig = S3StorageConfig( @@ -60,10 +96,11 @@ internal class S3StorageConfigTest { region = region, ) val envVarMap = s3StorageConfig.toEnvVarMap() - assertEquals(6, envVarMap.size) + assertEquals(7, envVarMap.size) assertEquals(bucketConfig.log, envVarMap[EnvVar.STORAGE_BUCKET_LOG.name]) assertEquals(bucketConfig.workloadOutput, envVarMap[EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT.name]) assertEquals(bucketConfig.activityPayload, envVarMap[EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD.name]) + assertEquals(bucketConfig.auditLogging, envVarMap[EnvVar.STORAGE_BUCKET_AUDIT_LOGGING.name]) assertEquals(bucketConfig.state, envVarMap[EnvVar.STORAGE_BUCKET_STATE.name]) assertEquals(StorageType.S3.name, envVarMap[EnvVar.STORAGE_TYPE.name]) assertFalse(envVarMap.containsKey(EnvVar.AWS_ACCESS_KEY_ID.name)) @@ -82,6 +119,7 @@ internal class S3StorageConfigTest { workloadOutput = "workload-output", log = "log", activityPayload = "activity-payload", + auditLogging = "audit-logging", ) val s3StorageConfig = S3StorageConfig( diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt index 5939d742ac5..35cdb7e1c6e 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt @@ -29,7 +29,14 @@ import software.amazon.awssdk.services.s3.model.NoSuchBucketException * When upgrading to Micronaut 4, the `@get:Primary` and `@get:Bean` annotations might be replaceable with @MockBean. 
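An illustrative aside, not part of the patch: with documentType, storageType and bucketName now exposed as properties rather than accessor methods, call sites and MockK stubs drop the parentheses. A sketch mirroring the test updates above; the stubbed values are arbitrary.

import io.airbyte.commons.storage.DocumentType
import io.airbyte.commons.storage.StorageClient
import io.airbyte.commons.storage.StorageType
import io.mockk.every
import io.mockk.mockk

val client =
  mockk<StorageClient> {
    every { storageType } returns StorageType.LOCAL
    every { documentType } returns DocumentType.AUDIT_LOGS
    every { bucketName } returns "audit-logging"
  }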
*/ -private val bucket = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload", activityPayload = "payload") +private val bucket = + StorageBucketConfig( + log = "log", + state = "state", + workloadOutput = "workload", + activityPayload = "payload", + auditLogging = null, + ) @MicronautTest @Property(name = STORAGE_TYPE, value = "local") @@ -46,6 +53,14 @@ class LocalStorageClientFactoryTest { val localStorageConfig: LocalStorageConfig = mockk { every { root } returns "/tmp/test" + every { buckets } returns + StorageBucketConfig( + log = "log", + state = "state", + workloadOutput = "wo", + activityPayload = "ap", + auditLogging = null, + ) } @Test diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt index 185f8621e58..c3d173d02b2 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt @@ -57,7 +57,14 @@ private const val KEY = "a" private const val DOC1 = "hello" private const val DOC2 = "bye" -private val buckets = StorageBucketConfig(log = "log", state = "state", workloadOutput = "workload", activityPayload = "payload") +private val buckets = + StorageBucketConfig( + log = "log", + state = "state", + workloadOutput = "workload", + activityPayload = "payload", + auditLogging = null, + ) internal class DocumentTypeTest { @Test @@ -65,6 +72,7 @@ internal class DocumentTypeTest { assertEquals(DocumentType.LOGS.prefix, Path.of("job-logging")) assertEquals(DocumentType.STATE.prefix, Path.of("/state")) assertEquals(DocumentType.WORKLOAD_OUTPUT.prefix, Path.of("/workload/output")) + assertEquals(DocumentType.AUDIT_LOGS.prefix, Path.of("audit-logging")) } } @@ -427,6 +435,8 @@ internal class LocalStorageClientTest { workloadOutput = "workload-output", log = "log", activityPayload = "activity-payload", + // Audit logging is null by default as it is SME feature only + auditLogging = null, ) val localStorageConfig = LocalStorageConfig( @@ -441,6 +451,32 @@ internal class LocalStorageClientTest { assertEquals(bucketConfig.state, envVarMap[EnvVar.STORAGE_BUCKET_STATE.name]) assertEquals(StorageType.LOCAL.name, envVarMap[EnvVar.STORAGE_TYPE.name]) } + + @Test + internal fun testToEnvVarMapWithAuditLogging() { + val root = "/root/path" + val bucketConfig = + StorageBucketConfig( + state = "state", + workloadOutput = "workload-output", + log = "log", + activityPayload = "activity-payload", + auditLogging = "audit-logging", + ) + val localStorageConfig = + LocalStorageConfig( + buckets = bucketConfig, + root = root, + ) + val envVarMap = localStorageConfig.toEnvVarMap() + assertEquals(6, envVarMap.size) + assertEquals(bucketConfig.log, envVarMap[EnvVar.STORAGE_BUCKET_LOG.name]) + assertEquals(bucketConfig.workloadOutput, envVarMap[EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT.name]) + assertEquals(bucketConfig.activityPayload, envVarMap[EnvVar.STORAGE_BUCKET_ACTIVITY_PAYLOAD.name]) + assertEquals(bucketConfig.state, envVarMap[EnvVar.STORAGE_BUCKET_STATE.name]) + assertEquals(bucketConfig.auditLogging, envVarMap[EnvVar.STORAGE_BUCKET_AUDIT_LOGGING.name]) + assertEquals(StorageType.LOCAL.name, envVarMap[EnvVar.STORAGE_TYPE.name]) + } } internal class MinioStorageClientTest { diff --git a/airbyte-commons-temporal/build.gradle.kts b/airbyte-commons-temporal/build.gradle.kts index 170548b5b62..82f3ef48a1e 100644 --- 
a/airbyte-commons-temporal/build.gradle.kts +++ b/airbyte-commons-temporal/build.gradle.kts @@ -4,8 +4,8 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + compileOnly("org.projectlombok:lombok:1.18.36") + annotationProcessor("org.projectlombok:lombok:1.18.36") // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java index 79e8886af28..1063f50ca53 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/ConnectionManagerUtils.java @@ -23,18 +23,21 @@ import io.temporal.workflow.Functions.Proc1; import io.temporal.workflow.Functions.TemporalFunctionalInterfaceMarker; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.util.Optional; import java.util.UUID; import java.util.function.Function; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utility functions for connection manager workflows. */ @Singleton -@Slf4j public class ConnectionManagerUtils { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final WorkflowClientWrapped workflowClientWrapped; private final MetricClient metricClient; diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/StreamResetRecordsHelper.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/StreamResetRecordsHelper.java index 944bca65701..9e43e7fd1e4 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/StreamResetRecordsHelper.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/StreamResetRecordsHelper.java @@ -12,17 +12,20 @@ import io.airbyte.persistence.job.JobPersistence; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.List; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper class that provides methods for dealing with stream reset records. 
*/ @Singleton -@Slf4j public class StreamResetRecordsHelper { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final JobPersistence jobPersistence; private final StreamResetPersistence streamResetPersistence; diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java index afea89b44bd..fa3857d8506 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalClient.java @@ -25,11 +25,13 @@ import io.airbyte.commons.temporal.scheduling.SpecWorkflow; import io.airbyte.commons.temporal.scheduling.state.WorkflowState; import io.airbyte.config.ActorContext; +import io.airbyte.config.ConfigScopeType; import io.airbyte.config.ConnectorJobOutput; import io.airbyte.config.JobCheckConnectionConfig; import io.airbyte.config.JobDiscoverCatalogConfig; import io.airbyte.config.JobGetSpecConfig; import io.airbyte.config.RefreshStream.RefreshType; +import io.airbyte.config.ScopedConfiguration; import io.airbyte.config.StandardCheckConnectionInput; import io.airbyte.config.StandardDiscoverCatalogInput; import io.airbyte.config.StreamDescriptor; @@ -37,6 +39,8 @@ import io.airbyte.config.persistence.StreamRefreshesRepository; import io.airbyte.config.persistence.StreamRefreshesRepositoryKt; import io.airbyte.config.persistence.StreamResetPersistence; +import io.airbyte.data.services.ScopedConfigurationService; +import io.airbyte.data.services.shared.NetworkSecurityTokenKey; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.featureflag.UseAsyncActivities; import io.airbyte.featureflag.Workspace; @@ -59,9 +63,12 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.nio.file.Path; +import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.UUID; @@ -73,18 +80,21 @@ import java.util.stream.Collectors; import lombok.Builder; import lombok.Value; -import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.time.StopWatch; +import org.jetbrains.annotations.NotNull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Airbyte's interface over temporal. */ -@Slf4j @Singleton @SuppressWarnings({"PMD.EmptyCatchBlock", "PMD.CompareObjectsWithEquals"}) public class TemporalClient { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + /** * This is used to sleep between 2 temporal queries. The query is needed to ensure that the cancel * and start manual sync methods wait before returning. 
Since temporal signals are async, we need to @@ -101,6 +111,7 @@ public class TemporalClient { private final StreamResetRecordsHelper streamResetRecordsHelper; private final MetricClient metricClient; private final FeatureFlagClient featureFlagClient; + private final ScopedConfigurationService scopedConfigurationService; private final String uiCommandsQueue; public TemporalClient(@Named("workspaceRootTemporal") final Path workspaceRoot, @@ -112,7 +123,8 @@ public TemporalClient(@Named("workspaceRootTemporal") final Path workspaceRoot, final ConnectionManagerUtils connectionManagerUtils, final StreamResetRecordsHelper streamResetRecordsHelper, final MetricClient metricClient, - final FeatureFlagClient featureFlagClient) { + final FeatureFlagClient featureFlagClient, + final ScopedConfigurationService scopedConfigurationService) { this.workspaceRoot = workspaceRoot; this.uiCommandsQueue = uiCommandsQueue; this.workflowClientWrapped = workflowClientWrapped; @@ -123,6 +135,7 @@ public TemporalClient(@Named("workspaceRootTemporal") final Path workspaceRoot, this.streamResetRecordsHelper = streamResetRecordsHelper; this.metricClient = metricClient; this.featureFlagClient = featureFlagClient; + this.scopedConfigurationService = scopedConfigurationService; } private final Set workflowNames = new HashSet<>(); @@ -470,7 +483,8 @@ public TemporalResponse submitCheckConnection(final UUID job .withActorId(config.getActorId()) .withConnectionConfiguration(config.getConnectionConfiguration()) .withResourceRequirements(config.getResourceRequirements()) - .withActorContext(context); + .withActorContext(context) + .withNetworkSecurityTokens(getNetworkSecurityTokens(workspaceId)); if (!featureFlagClient.boolVariation(UseAsyncActivities.INSTANCE, new Workspace(workspaceId))) { return execute(jobRunConfig, @@ -508,7 +522,8 @@ public TemporalResponse submitDiscoverSchema(final UUID jobI .withPriority(priority); final StandardDiscoverCatalogInput input = new StandardDiscoverCatalogInput().withConnectionConfiguration(config.getConnectionConfiguration()) .withSourceId(config.getSourceId()).withConnectorVersion(config.getConnectorVersion()).withConfigHash(config.getConfigHash()) - .withResourceRequirements(config.getResourceRequirements()).withActorContext(context).withManual(true); + .withResourceRequirements(config.getResourceRequirements()).withActorContext(context).withManual(true) + .withNetworkSecurityTokens(getNetworkSecurityTokens(workspaceId)); if (!featureFlagClient.boolVariation(UseAsyncActivities.INSTANCE, new Workspace(workspaceId))) { return execute(jobRunConfig, @@ -682,4 +697,16 @@ boolean isInRunningWorkflowCache(final String workflowName) { return workflowNames.contains(workflowName); } + private @NotNull List getNetworkSecurityTokens(final UUID workspaceId) { + final Map scopes = Map.of(ConfigScopeType.WORKSPACE, workspaceId); + try { + final List podLabelConfigurations = + scopedConfigurationService.getScopedConfigurations(NetworkSecurityTokenKey.INSTANCE, scopes); + return podLabelConfigurations.stream().map(ScopedConfiguration::getValue).toList(); + } catch (IllegalArgumentException e) { + log.error(e.getMessage()); + return Collections.emptyList(); + } + } + } diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalInitializationUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalInitializationUtils.java index 73b017b3342..4b4baf97ec3 100644 --- 
a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalInitializationUtils.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalInitializationUtils.java @@ -11,17 +11,20 @@ import io.temporal.serviceclient.WorkflowServiceStubs; import jakarta.inject.Inject; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.util.concurrent.TimeUnit; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Utils for verifying that temporal is running and available. Used at the startup of an * application. */ @Singleton -@Slf4j public class TemporalInitializationUtils { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @Inject private WorkflowServiceStubs temporalService; @Value("${temporal.cloud.namespace}") diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalUtils.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalUtils.java index 0c1deaad2f4..d9d7f37d53d 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalUtils.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/TemporalUtils.java @@ -21,21 +21,24 @@ import io.temporal.serviceclient.WorkflowServiceStubs; import io.temporal.serviceclient.WorkflowServiceStubsOptions; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.nio.file.Path; import java.time.Duration; import java.util.Objects; import java.util.function.Supplier; -import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.time.DurationFormatUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Temporal Utility functions. */ // todo (cgardens) - rename? utils implies it's static utility function -@Slf4j @Singleton public class TemporalUtils { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final Duration WAIT_INTERVAL = Duration.ofSeconds(2); private static final Duration MAX_TIME_TO_CONNECT = Duration.ofMinutes(2); private static final Duration WAIT_TIME_AFTER_CONNECT = Duration.ofSeconds(5); diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionManagerWorkflow.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionManagerWorkflow.java index d0c0cab6271..a2f30fadfbc 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionManagerWorkflow.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/ConnectionManagerWorkflow.java @@ -9,9 +9,7 @@ import io.temporal.workflow.SignalMethod; import io.temporal.workflow.WorkflowInterface; import io.temporal.workflow.WorkflowMethod; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; +import java.util.Objects; /** * Temporal workflow that manages running sync jobs for a connection. It handles scheduling, the @@ -76,14 +74,53 @@ public interface ConnectionManagerWorkflow { /** * Job Attempt Information. 
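An illustrative aside, not part of the patch: the hunk that follows hand-writes what Lombok's @Data, @NoArgsConstructor and @AllArgsConstructor previously generated for JobInformation (accessors, equals, hashCode, toString). For comparison only, the same value semantics expressed as a Kotlin data class; the actual workflow interface stays in Java.

// Hypothetical Kotlin equivalent of the expanded Java class below.
data class JobInformation(var jobId: Long = 0, var attemptId: Int = 0)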
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class JobInformation { private long jobId; private int attemptId; + public JobInformation() {} + + public JobInformation(long jobId, int attemptId) { + this.jobId = jobId; + this.attemptId = attemptId; + } + + public long getJobId() { + return jobId; + } + + public int getAttemptId() { + return attemptId; + } + + public void setJobId(long jobId) { + this.jobId = jobId; + } + + public void setAttemptId(int attemptId) { + this.attemptId = attemptId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + JobInformation that = (JobInformation) o; + return jobId == that.jobId && attemptId == that.attemptId; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, attemptId); + } + + @Override + public String toString() { + return "JobInformation{jobId=" + jobId + ", attemptId=" + attemptId + '}'; + } + } /** diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowInternalState.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowInternalState.java index 311253b4d32..46e5ba23a01 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowInternalState.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/WorkflowInternalState.java @@ -7,17 +7,11 @@ import io.airbyte.config.FailureReason; import java.util.HashSet; import java.util.Set; -import lombok.Getter; -import lombok.NoArgsConstructor; -import lombok.Setter; /** * Internal state of workflow. */ // todo (cgardens) - how is this different from WorkflowState. -@Getter -@Setter -@NoArgsConstructor public class WorkflowInternalState { private Long jobId = null; @@ -31,4 +25,38 @@ public class WorkflowInternalState { private Set failures = new HashSet<>(); private Boolean partialSuccess = null; + public WorkflowInternalState() {} + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getAttemptNumber() { + return attemptNumber; + } + + public void setAttemptNumber(Integer attemptNumber) { + this.attemptNumber = attemptNumber; + } + + public Set getFailures() { + return failures; + } + + public void setFailures(Set failures) { + this.failures = failures; + } + + public Boolean getPartialSuccess() { + return partialSuccess; + } + + public void setPartialSuccess(Boolean partialSuccess) { + this.partialSuccess = partialSuccess; + } + } diff --git a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/WorkflowStateChangedListener.java b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/WorkflowStateChangedListener.java index 6ba229d6c8d..b218af5c50c 100644 --- a/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/WorkflowStateChangedListener.java +++ b/airbyte-commons-temporal/src/main/java/io/airbyte/commons/temporal/scheduling/state/listener/WorkflowStateChangedListener.java @@ -7,9 +7,9 @@ import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonSubTypes.Type; import com.fasterxml.jackson.annotation.JsonTypeInfo; +import java.util.Objects; import java.util.Queue; import java.util.UUID; -import lombok.Value; /** * Listen for changes to the WorkflowState so that they can be communicated to a 
running @@ -48,12 +48,43 @@ enum StateField { /** * Container for transmitting changes to workflow state fields for a connection manager workflow. */ - @Value class ChangedStateEvent { private final StateField field; private final boolean value; + public ChangedStateEvent(StateField field, boolean value) { + this.field = field; + this.value = value; + } + + public StateField getField() { + return field; + } + + public boolean isValue() { + return value; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + ChangedStateEvent that = (ChangedStateEvent) o; + return value == that.value && field == that.field; + } + + @Override + public int hashCode() { + return Objects.hash(field, value); + } + + @Override + public String toString() { + return "ChangedStateEvent{field=" + field + ", value=" + value + '}'; + } + } Queue events(UUID testId); diff --git a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java index 73a743810f9..49dc1e49c25 100644 --- a/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java +++ b/airbyte-commons-temporal/src/test/java/io/airbyte/commons/temporal/TemporalClientTest.java @@ -8,6 +8,7 @@ import static io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow.NON_RUNNING_JOB_ID; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; @@ -26,11 +27,13 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.temporal.TemporalClient.ManualOperationResult; import io.airbyte.commons.temporal.exception.DeletedWorkflowException; -import io.airbyte.commons.temporal.scheduling.CheckConnectionWorkflow; +import io.airbyte.commons.temporal.scheduling.CheckCommandInput; import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow; import io.airbyte.commons.temporal.scheduling.ConnectionManagerWorkflow.JobInformation; -import io.airbyte.commons.temporal.scheduling.DiscoverCatalogWorkflow; -import io.airbyte.commons.temporal.scheduling.SpecWorkflow; +import io.airbyte.commons.temporal.scheduling.ConnectorCommandInput; +import io.airbyte.commons.temporal.scheduling.ConnectorCommandWorkflow; +import io.airbyte.commons.temporal.scheduling.DiscoverCommandInput; +import io.airbyte.commons.temporal.scheduling.SpecCommandInput; import io.airbyte.commons.temporal.scheduling.state.WorkflowState; import io.airbyte.config.ActorContext; import io.airbyte.config.ConnectorJobOutput; @@ -39,12 +42,11 @@ import io.airbyte.config.JobDiscoverCatalogConfig; import io.airbyte.config.JobGetSpecConfig; import io.airbyte.config.RefreshStream.RefreshType; -import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.config.StandardDiscoverCatalogInput; import io.airbyte.config.StreamDescriptor; import io.airbyte.config.WorkloadPriority; import io.airbyte.config.persistence.StreamRefreshesRepository; import io.airbyte.config.persistence.StreamResetPersistence; +import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.featureflag.TestClient; import io.airbyte.metrics.lib.MetricClient; import 
io.airbyte.persistence.job.models.IntegrationLauncherConfig; @@ -88,6 +90,7 @@ public class TemporalClientTest { private static final String CHECK_TASK_QUEUE = "CHECK_CONNECTION"; private static final String DISCOVER_TASK_QUEUE = "DISCOVER_SCHEMA"; + private static final String UI_COMMANDS_TASK_QUEUE = "ui-commands-queue"; private static final JobRunConfig JOB_RUN_CONFIG = new JobRunConfig() .withJobId(String.valueOf(JOB_ID)) .withAttemptId((long) ATTEMPT_ID); @@ -131,12 +134,13 @@ void setup() throws IOException { final var metricClient = mock(MetricClient.class); final var workflowClientWrapped = new WorkflowClientWrapped(workflowClient, metricClient); final var workflowServiceStubsWrapped = new WorkflowServiceStubsWrapped(workflowServiceStubs, metricClient); + final var scopedConfigurationService = mock(ScopedConfigurationService.class); connectionManagerUtils = spy(new ConnectionManagerUtils(workflowClientWrapped, metricClient)); streamResetRecordsHelper = mock(StreamResetRecordsHelper.class); temporalClient = spy(new TemporalClient(workspaceRoot, uiCommandsQueue, workflowClientWrapped, workflowServiceStubsWrapped, streamResetPersistence, streamRefreshesRepository, - connectionManagerUtils, streamResetRecordsHelper, mock(MetricClient.class), new TestClient())); + connectionManagerUtils, streamResetRecordsHelper, mock(MetricClient.class), new TestClient(), scopedConfigurationService)); } @Nested @@ -149,10 +153,11 @@ void init() { mConnectionManagerUtils = mock(ConnectionManagerUtils.class); final var metricClient = mock(MetricClient.class); + final var scopedConfigurationService = mock(ScopedConfigurationService.class); temporalClient = spy( new TemporalClient(workspaceRoot, uiCommandsQueue, new WorkflowClientWrapped(workflowClient, metricClient), new WorkflowServiceStubsWrapped(workflowServiceStubs, metricClient), streamResetPersistence, streamRefreshesRepository, - mConnectionManagerUtils, streamResetRecordsHelper, metricClient, new TestClient())); + mConnectionManagerUtils, streamResetRecordsHelper, metricClient, new TestClient(), scopedConfigurationService)); } @Test @@ -233,52 +238,62 @@ class TestJobSubmission { @Test void testSubmitGetSpec() { - final SpecWorkflow specWorkflow = mock(SpecWorkflow.class); - when(workflowClient.newWorkflowStub(SpecWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.GET_SPEC, JOB_UUID))) + final ConnectorCommandWorkflow specWorkflow = mock(ConnectorCommandWorkflow.class); + when(workflowClient.newWorkflowStub(eq(ConnectorCommandWorkflow.class), any(WorkflowOptions.class))) .thenReturn(specWorkflow); final JobGetSpecConfig getSpecConfig = new JobGetSpecConfig().withDockerImage(IMAGE_NAME1); temporalClient.submitGetSpec(JOB_UUID, ATTEMPT_ID, WORKSPACE_ID, getSpecConfig); - specWorkflow.run(JOB_RUN_CONFIG, UUID_LAUNCHER_CONFIG); - verify(workflowClient).newWorkflowStub(SpecWorkflow.class, TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.GET_SPEC, JOB_UUID)); + + final ArgumentCaptor workflowOptionsCaptor = ArgumentCaptor.forClass(WorkflowOptions.class); + verify(workflowClient).newWorkflowStub(eq(ConnectorCommandWorkflow.class), workflowOptionsCaptor.capture()); + assertEquals(UI_COMMANDS_TASK_QUEUE, workflowOptionsCaptor.getValue().getTaskQueue()); + + final ArgumentCaptor connectorCommandInputCaptor = ArgumentCaptor.forClass(ConnectorCommandInput.class); + verify(specWorkflow).run(connectorCommandInputCaptor.capture()); + assertInstanceOf(SpecCommandInput.class, connectorCommandInputCaptor.getValue()); } @Test void 
testSubmitCheckConnection() { - final CheckConnectionWorkflow checkConnectionWorkflow = mock(CheckConnectionWorkflow.class); + final ConnectorCommandWorkflow checkConnectionWorkflow = mock(ConnectorCommandWorkflow.class); when( - workflowClient.newWorkflowStub(CheckConnectionWorkflow.class, - TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.CHECK_CONNECTION, JOB_UUID))) - .thenReturn(checkConnectionWorkflow); + workflowClient.newWorkflowStub(eq(ConnectorCommandWorkflow.class), any(WorkflowOptions.class))) + .thenReturn(checkConnectionWorkflow); final JobCheckConnectionConfig checkConnectionConfig = new JobCheckConnectionConfig() .withDockerImage(IMAGE_NAME1) .withConnectionConfiguration(Jsons.emptyObject()); - final StandardCheckConnectionInput input = new StandardCheckConnectionInput() - .withConnectionConfiguration(checkConnectionConfig.getConnectionConfiguration()); temporalClient.submitCheckConnection(JOB_UUID, ATTEMPT_ID, WORKSPACE_ID, CHECK_TASK_QUEUE, checkConnectionConfig, new ActorContext()); - checkConnectionWorkflow.run(JOB_RUN_CONFIG, UUID_LAUNCHER_CONFIG, input); - verify(workflowClient).newWorkflowStub(CheckConnectionWorkflow.class, - TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.CHECK_CONNECTION, JOB_UUID)); + + final ArgumentCaptor workflowOptionsCaptor = ArgumentCaptor.forClass(WorkflowOptions.class); + verify(workflowClient).newWorkflowStub(eq(ConnectorCommandWorkflow.class), workflowOptionsCaptor.capture()); + assertEquals(UI_COMMANDS_TASK_QUEUE, workflowOptionsCaptor.getValue().getTaskQueue()); + + final ArgumentCaptor connectorCommandInputCaptor = ArgumentCaptor.forClass(ConnectorCommandInput.class); + verify(checkConnectionWorkflow).run(connectorCommandInputCaptor.capture()); + assertInstanceOf(CheckCommandInput.class, connectorCommandInputCaptor.getValue()); } @Test void testSubmitDiscoverSchema() { - final DiscoverCatalogWorkflow discoverCatalogWorkflow = mock(DiscoverCatalogWorkflow.class); - when(workflowClient.newWorkflowStub(DiscoverCatalogWorkflow.class, - TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.DISCOVER_SCHEMA, JOB_UUID))) - .thenReturn(discoverCatalogWorkflow); + final ConnectorCommandWorkflow discoverCatalogWorkflow = mock(ConnectorCommandWorkflow.class); + when(workflowClient.newWorkflowStub(eq(ConnectorCommandWorkflow.class), any(WorkflowOptions.class))) + .thenReturn(discoverCatalogWorkflow); final JobDiscoverCatalogConfig checkConnectionConfig = new JobDiscoverCatalogConfig() .withDockerImage(IMAGE_NAME1) .withConnectionConfiguration(Jsons.emptyObject()); - final StandardDiscoverCatalogInput input = new StandardDiscoverCatalogInput() - .withConnectionConfiguration(checkConnectionConfig.getConnectionConfiguration()); temporalClient.submitDiscoverSchema(JOB_UUID, ATTEMPT_ID, WORKSPACE_ID, DISCOVER_TASK_QUEUE, checkConnectionConfig, new ActorContext(), WorkloadPriority.DEFAULT); - discoverCatalogWorkflow.run(JOB_RUN_CONFIG, UUID_LAUNCHER_CONFIG, input); - verify(workflowClient).newWorkflowStub(DiscoverCatalogWorkflow.class, - TemporalWorkflowUtils.buildWorkflowOptions(TemporalJobType.DISCOVER_SCHEMA, JOB_UUID)); + + final ArgumentCaptor workflowOptionsCaptor = ArgumentCaptor.forClass(WorkflowOptions.class); + verify(workflowClient).newWorkflowStub(eq(ConnectorCommandWorkflow.class), workflowOptionsCaptor.capture()); + assertEquals(UI_COMMANDS_TASK_QUEUE, workflowOptionsCaptor.getValue().getTaskQueue()); + + final ArgumentCaptor connectorCommandInputCaptor = ArgumentCaptor.forClass(ConnectorCommandInput.class); + 
verify(discoverCatalogWorkflow).run(connectorCommandInputCaptor.capture()); + assertInstanceOf(DiscoverCommandInput.class, connectorCommandInputCaptor.getValue()); } } diff --git a/airbyte-commons-with-dependencies/build.gradle.kts b/airbyte-commons-with-dependencies/build.gradle.kts index 51b21cdac7e..af69a9794e0 100644 --- a/airbyte-commons-with-dependencies/build.gradle.kts +++ b/airbyte-commons-with-dependencies/build.gradle.kts @@ -4,8 +4,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(libs.bundles.micronaut.annotation.processor) implementation(project(":oss:airbyte-commons")) diff --git a/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/workers/config/WorkerConfigsProvider.java b/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/workers/config/WorkerConfigsProvider.java index f54ed6f0eb6..1c8ed55576f 100644 --- a/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/workers/config/WorkerConfigsProvider.java +++ b/airbyte-commons-with-dependencies/src/main/java/io/airbyte/commons/workers/config/WorkerConfigsProvider.java @@ -25,7 +25,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; /** * Provide WorkerConfigs. @@ -34,7 +33,6 @@ * `airbyte.worker.kube-job-configs` key. */ @Singleton -@Slf4j public class WorkerConfigsProvider implements ResourceRequirementsProvider { /** diff --git a/airbyte-commons-worker/build.gradle.kts b/airbyte-commons-worker/build.gradle.kts index 7f891b93dd7..7b08b87bd92 100644 --- a/airbyte-commons-worker/build.gradle.kts +++ b/airbyte-commons-worker/build.gradle.kts @@ -4,8 +4,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) @@ -64,8 +62,6 @@ dependencies { implementation(project(":oss:airbyte-worker-models")) implementation(libs.jakarta.validation.api) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java index 41bcc21a312..eb7d99889e1 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/ReplicationInputHydrator.java @@ -53,6 +53,7 @@ import io.airbyte.persistence.job.models.ReplicationInput; import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.helper.BackfillHelper; +import io.airbyte.workers.helper.MapperSecretHydrationHelper; import io.airbyte.workers.helper.ResumableFullRefreshStatsHelper; import io.airbyte.workers.input.ReplicationInputMapper; import io.airbyte.workers.models.JobInput; @@ -77,6 +78,7 @@ public class ReplicationInputHydrator { private final AirbyteApiClient airbyteApiClient; private final ResumableFullRefreshStatsHelper resumableFullRefreshStatsHelper; private final SecretsRepositoryReader secretsRepositoryReader; + private 
final MapperSecretHydrationHelper mapperSecretHydrationHelper; private final ReplicationInputMapper mapper; private final Boolean useRuntimeSecretPersistence; @@ -89,6 +91,7 @@ public class ReplicationInputHydrator { public ReplicationInputHydrator(final AirbyteApiClient airbyteApiClient, final ResumableFullRefreshStatsHelper resumableFullRefreshStatsHelper, final SecretsRepositoryReader secretsRepositoryReader, + final MapperSecretHydrationHelper mapperSecretHydrationHelper, final BackfillHelper backfillHelper, final CatalogClientConverters catalogClientConverters, final ReplicationInputMapper mapper, @@ -99,6 +102,7 @@ public ReplicationInputHydrator(final AirbyteApiClient airbyteApiClient, this.catalogClientConverters = catalogClientConverters; this.resumableFullRefreshStatsHelper = resumableFullRefreshStatsHelper; this.secretsRepositoryReader = secretsRepositoryReader; + this.mapperSecretHydrationHelper = mapperSecretHydrationHelper; this.mapper = mapper; this.metricClient = metricClient; this.useRuntimeSecretPersistence = useRuntimeSecretPersistence; @@ -173,7 +177,7 @@ public ReplicationInput getHydratedReplicationInput(final ReplicationActivityInp : airbyteApiClient.getConnectionApi().getConnection(new ConnectionIdRequestBody(replicationActivityInput.getConnectionId())); final ConfiguredAirbyteCatalog catalog = retrieveCatalog(connectionInfo); - if (replicationActivityInput.getIsReset()) { + if (replicationActivityInput.isReset()) { // If this is a reset, we need to set the streams being reset to Full Refresh | Overwrite. updateCatalogForReset(replicationActivityInput, catalog); } @@ -242,10 +246,14 @@ public ReplicationInput getHydratedReplicationInput(final ReplicationActivityInp } } + // Hydrate mapper secrets + final ConfiguredAirbyteCatalog hydratedCatalog = + mapperSecretHydrationHelper.hydrateMapperSecrets(catalog, useRuntimeSecretPersistence, organizationId); + return mapper.toReplicationInput(replicationActivityInput) .withSourceConfiguration(fullSourceConfig) .withDestinationConfiguration(fullDestinationConfig) - .withCatalog(catalog) + .withCatalog(hydratedCatalog) .withState(state) .withDestinationSupportsRefreshes(resolvedDestinationVersion.getSupportRefreshes()); } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java index 15328f2de27..fce79ce92cf 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/general/ReplicationWorkerFactory.java @@ -17,6 +17,7 @@ import io.airbyte.commons.protocol.AirbyteMessageSerDeProvider; import io.airbyte.commons.protocol.AirbyteProtocolVersionedMigratorFactory; import io.airbyte.config.ConfiguredAirbyteCatalog; +import io.airbyte.config.JobSyncConfig; import io.airbyte.featureflag.Connection; import io.airbyte.featureflag.Context; import io.airbyte.featureflag.Destination; @@ -69,13 +70,15 @@ import io.airbyte.workload.api.client.WorkloadApiClient; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.time.Duration; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.concurrent.TimeUnit; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Factory for the BufferedReplicationWorker. 
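An illustrative aside, not part of the patch: the ReplicationWorkerFactory and EmptyAirbyteSource hunks that follow thread a hasCustomNamespace flag into reset jobs. When the connection's namespace definition is CUSTOMFORMAT, the per-stream STARTED/COMPLETE status traces are suppressed and isFinished() waits only for the state message. A rough Kotlin sketch of the source selection, reusing the names from those hunks; selectSource and localContainerSource are hypothetical stand-ins.

import io.airbyte.config.JobSyncConfig
import io.airbyte.persistence.job.models.ReplicationInput
import io.airbyte.workers.internal.AirbyteSource
import io.airbyte.workers.internal.EmptyAirbyteSource

fun selectSource(replicationInput: ReplicationInput, localContainerSource: AirbyteSource): AirbyteSource =
  if (replicationInput.isReset) {
    // Reset jobs read from an empty source; the flag suppresses stream status traces for custom namespaces.
    EmptyAirbyteSource(replicationInput.namespaceDefinition == JobSyncConfig.NamespaceDefinitionType.CUSTOMFORMAT)
  } else {
    localContainerSource
  }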
@@ -85,9 +88,10 @@ * dependencies of the DefaultReplicationWorker were stateless. */ @Singleton -@Slf4j public class ReplicationWorkerFactory { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AirbyteMessageSerDeProvider serDeProvider; private final AirbyteProtocolVersionedMigratorFactory migratorFactory; private final GsonPksExtractor gsonPksExtractor; @@ -167,7 +171,7 @@ public BufferedReplicationWorker create(final ReplicationInput replicationInput, // reset jobs use an empty source to induce resetting all data in destination. final var airbyteSource = replicationInput.getIsReset() - ? new EmptyAirbyteSource() + ? new EmptyAirbyteSource(replicationInput.getNamespaceDefinition() == JobSyncConfig.NamespaceDefinitionType.CUSTOMFORMAT) : new LocalContainerAirbyteSource( heartbeatMonitor, getStreamFactory(sourceLauncherConfig, replicationInput.getCatalog(), SOURCE_LOG_MDC_BUILDER, invalidLineConfig), diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/EmptyAirbyteSource.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/EmptyAirbyteSource.java index 06f12b4b863..d1fcc7c5cc9 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/EmptyAirbyteSource.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/EmptyAirbyteSource.java @@ -23,6 +23,7 @@ import io.airbyte.protocol.models.AirbyteStreamState; import io.airbyte.protocol.models.AirbyteStreamStatusTraceMessage.AirbyteStreamStatus; import io.airbyte.workers.test_utils.AirbyteMessageUtils; +import java.lang.invoke.MethodHandles; import java.nio.file.Path; import java.util.ArrayList; import java.util.HashSet; @@ -33,15 +34,18 @@ import java.util.UUID; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This source will never emit any messages. It can be used in cases where that is helpful (hint: * reset connection jobs). */ -@Slf4j public class EmptyAirbyteSource implements AirbyteSource { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private final AtomicBoolean hasCustomNamespace; private final AtomicBoolean hasEmittedState; private final AtomicBoolean hasEmittedStreamStatus; private final Queue<StreamDescriptor> streamsToReset = new LinkedList<>(); @@ -49,7 +53,8 @@ public class EmptyAirbyteSource implements AirbyteSource { private boolean isStarted = false; private Optional<StateWrapper> stateWrapper = Optional.empty(); - public EmptyAirbyteSource() { + public EmptyAirbyteSource(final boolean hasCustomNamespace) { + this.hasCustomNamespace = new AtomicBoolean(hasCustomNamespace); hasEmittedState = new AtomicBoolean(); hasEmittedStreamStatus = new AtomicBoolean(); } @@ -82,7 +87,7 @@ public void start(final WorkerSourceConfig workerSourceConfig, final Path jobRoo @Trace(operationName = WORKER_OPERATION_NAME) @Override public boolean isFinished() { - return hasEmittedState.get() && hasEmittedStreamStatus.get(); + return hasEmittedState.get() && (hasEmittedStreamStatus.get() || hasCustomNamespace.get()); } @Trace(operationName = WORKER_OPERATION_NAME) @@ -137,9 +142,13 @@ private Optional<AirbyteMessage> emitPerStreamState() { // Per stream, we emit one 'started', one null state and one 'complete' message. // Since there's only 1 state message we move directly from 'started' to 'complete'.
final var s = ProtocolConverters.toProtocol(streamsToReset.poll()); - perStreamMessages.add(AirbyteMessageUtils.createStatusTraceMessage(s, AirbyteStreamStatus.STARTED)); + if (!hasCustomNamespace.get()) { + perStreamMessages.add(AirbyteMessageUtils.createStatusTraceMessage(s, AirbyteStreamStatus.STARTED)); + } perStreamMessages.add(buildNullStreamStateMessage(s)); - perStreamMessages.add(AirbyteMessageUtils.createStatusTraceMessage(s, AirbyteStreamStatus.COMPLETE)); + if (!hasCustomNamespace.get()) { + perStreamMessages.add(AirbyteMessageUtils.createStatusTraceMessage(s, AirbyteStreamStatus.COMPLETE)); + } } final AirbyteMessage message = perStreamMessages.poll(); @@ -177,8 +186,10 @@ private Optional emitStreamResetTraceMessagesForSingleStateTypes // Per stream, we emit one 'started' and one 'complete' message. // The single null state message is to be emitted by the caller. final var s = ProtocolConverters.toProtocol(streamsToReset.poll()); - perStreamMessages.add(AirbyteMessageUtils.createStatusTraceMessage(s, AirbyteStreamStatus.STARTED)); - perStreamMessages.add(AirbyteMessageUtils.createStatusTraceMessage(s, AirbyteStreamStatus.COMPLETE)); + if (!hasCustomNamespace.get()) { + perStreamMessages.add(AirbyteMessageUtils.createStatusTraceMessage(s, AirbyteStreamStatus.STARTED)); + perStreamMessages.add(AirbyteMessageUtils.createStatusTraceMessage(s, AirbyteStreamStatus.COMPLETE)); + } } final AirbyteMessage message = perStreamMessages.poll(); diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/FieldSelector.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/FieldSelector.java index 3a0d5ed33b2..ab4d2a22688 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/FieldSelector.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/FieldSelector.java @@ -13,6 +13,7 @@ import io.airbyte.protocol.models.AirbyteStreamNameNamespacePair; import io.airbyte.workers.RecordSchemaValidator; import io.airbyte.workers.WorkerMetricReporter; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -25,15 +26,17 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import kotlin.text.Regex; -import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.ImmutablePair; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Handles FieldSelection. */ -@Slf4j public class FieldSelector { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final Regex PROTECTED_JSON_SCHEMA_KEYS = new Regex("^\\$(id|comment|schema)$"); /* diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatMonitor.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatMonitor.java index 30c6d8645dc..7cc095a5fc4 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatMonitor.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/internal/HeartbeatMonitor.java @@ -10,7 +10,6 @@ import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Supplier; -import lombok.Getter; /** * Tracks heartbeats and, when asked, says if it has been too long since the last heartbeat. 
He's @@ -20,7 +19,6 @@ */ public class HeartbeatMonitor { - @Getter private final Duration heartbeatFreshnessThreshold; private final Supplier nowSupplier; private final AtomicReference lastBeat; @@ -67,4 +65,8 @@ public Optional getTimeSinceLastBeat() { } } + public Duration getHeartbeatFreshnessThreshold() { + return heartbeatFreshnessThreshold; + } + } diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/CheckConnectionInput.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/CheckConnectionInput.java deleted file mode 100644 index 75c5e17c7b2..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/CheckConnectionInput.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * CheckConnectionInput. - */ -@Data -@NoArgsConstructor -@AllArgsConstructor -@SuppressWarnings("PMD.AvoidFieldNameMatchingTypeName") -public class CheckConnectionInput { - - private JobRunConfig jobRunConfig; - private IntegrationLauncherConfig launcherConfig; - private StandardCheckConnectionInput checkConnectionInput; - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/DiscoverCatalogInput.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/DiscoverCatalogInput.java deleted file mode 100644 index 1a2e06dffd6..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/DiscoverCatalogInput.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import io.airbyte.config.StandardDiscoverCatalogInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -@Data -@NoArgsConstructor -@AllArgsConstructor -@SuppressWarnings("PMD.AvoidFieldNameMatchingTypeName") -public class DiscoverCatalogInput { - - private JobRunConfig jobRunConfig; - private IntegrationLauncherConfig launcherConfig; - private StandardDiscoverCatalogInput discoverCatalogInput; - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/GeneratedJobInput.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/GeneratedJobInput.java deleted file mode 100644 index 52fd30e5b3c..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/GeneratedJobInput.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import io.airbyte.config.StandardSyncInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * Generated job input. 
- */ -@Data -@NoArgsConstructor -@AllArgsConstructor -public class GeneratedJobInput { - - private JobRunConfig jobRunConfig; - private IntegrationLauncherConfig sourceLauncherConfig; - private IntegrationLauncherConfig destinationLauncherConfig; - private StandardSyncInput syncInput; - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogOutput.kt b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogOutput.kt deleted file mode 100644 index 66c6c4cf473..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogOutput.kt +++ /dev/null @@ -1,18 +0,0 @@ -package io.airbyte.workers.models - -import io.airbyte.config.CatalogDiff - -/** - * A very basic discriminated union of a successful catalog postprocess and an error. Allows bypassing - * extraneous exception wrapping / propagation. Written naively to allow interop with Java. - */ -data class PostprocessCatalogOutput private constructor(val diff: CatalogDiff?, val message: String?, val stackTrace: String?) { - val isSuccess = message == null && stackTrace == null - val isFailure = !isSuccess - - companion object { - fun success(diff: CatalogDiff?): PostprocessCatalogOutput = PostprocessCatalogOutput(diff, null, null) - - fun failure(t: Throwable): PostprocessCatalogOutput = PostprocessCatalogOutput(null, t.message, t.stackTraceToString()) - } -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/SecretMetadata.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/SecretMetadata.java deleted file mode 100644 index 527d192842f..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/SecretMetadata.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -/** - * Secret metadata used to determine the secret name & key. Used to build a secret key selector to - * push in an env var to worker pods. - * - * @param secretName name of the secret - * @param secretKey key of the secret which holds the value. - */ -public record SecretMetadata(String secretName, String secretKey) { - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/SpecInput.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/SpecInput.java deleted file mode 100644 index 57c8bc31924..00000000000 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/SpecInput.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.models; - -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -@Data -@NoArgsConstructor -@AllArgsConstructor -public class SpecInput { - - private JobRunConfig jobRunConfig; - private IntegrationLauncherConfig launcherConfig; - -} diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/pod/Metadata.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/pod/Metadata.java index 87faf3f9b01..74dc765b45e 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/pod/Metadata.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/pod/Metadata.java @@ -19,6 +19,7 @@ public final class Metadata { public static final String WORKER_POD_LABEL_KEY = "airbyte"; public static final String WORKER_POD_LABEL_VALUE = "job-pod"; public static final String CONNECTION_ID_LABEL_KEY = "connection_id"; + public static final String ACTOR_ID_LABEL_KEY = "actor_id"; public static final String IMAGE_NAME = "image_name"; public static final String IMAGE_VERSION = "image_version"; public static final String ACTOR_TYPE = "actor_type"; diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java index daaa11e5639..c49726d03a8 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/test_utils/TestConfigHelpers.java @@ -66,7 +66,9 @@ public static ImmutablePair createSyncConfig(fi .withWorkspaceId(replicationInput.getWorkspaceId()) .withConnectionContext(new ConnectionContext() .withOrganizationId(organizationId) - .withSourceDefinitionId(sourceDefinitionId))); + .withSourceDefinitionId(sourceDefinitionId)) + .withUseAsyncActivities(true) + .withUseAsyncReplicate(true)); } public static ImmutablePair createReplicationConfig() { diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/StateCheckSumCountEventHandler.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/StateCheckSumCountEventHandler.kt index c6bb24e776a..c3ff456b395 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/StateCheckSumCountEventHandler.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/general/StateCheckSumCountEventHandler.kt @@ -464,10 +464,11 @@ class StateCheckSumCountEventHandler( " Hash collisions were observed so count comparison result may be wrong." } + if (includeStreamInLogs) { - " Observed the following record counts per stream: \n" + - streamPlatformRecordCounts.forEach { (name, count) -> - " $name : $count\n" - } + val namesAndCounts = + streamPlatformRecordCounts.map { (name, count) -> + " $name : $count" + }.joinToString("\n") + " Observed the following record counts per stream: \n$namesAndCounts" } else { "" } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/Hasher.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/Hasher.kt new file mode 100644 index 00000000000..feca4649802 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/Hasher.kt @@ -0,0 +1,8 @@ +package io.airbyte.workers.hashing + +interface Hasher { + fun hash( + value: String, + salt: String? 
= null, + ): String +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/Sha256Hasher.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/Sha256Hasher.kt new file mode 100644 index 00000000000..f29a6caa131 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/Sha256Hasher.kt @@ -0,0 +1,21 @@ +package io.airbyte.workers.hashing + +import io.micronaut.context.annotation.Primary +import jakarta.inject.Singleton + +@Singleton +@Primary +class Sha256Hasher : Hasher { + override fun hash( + value: String, + salt: String?, + ): String { + val bytes = value.toByteArray() + val md = java.security.MessageDigest.getInstance("SHA-256") + salt?.let { + md.update(salt.toByteArray()) + } + val digest = md.digest(bytes) + return digest.fold("") { str, it -> str + "%02x".format(it) } + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/TestHasher.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/TestHasher.kt new file mode 100644 index 00000000000..70311dcc106 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/hashing/TestHasher.kt @@ -0,0 +1,15 @@ +package io.airbyte.workers.hashing + +import io.micronaut.context.annotation.Secondary +import jakarta.inject.Singleton + +@Singleton +@Secondary +class TestHasher : Hasher { + override fun hash( + value: String, + salt: String?, + ): String { + return value + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/MapperSecretHydrationHelper.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/MapperSecretHydrationHelper.kt new file mode 100644 index 00000000000..b55b0a3c690 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/helper/MapperSecretHydrationHelper.kt @@ -0,0 +1,114 @@ +package io.airbyte.workers.helper + +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.api.client.AirbyteApiClient +import io.airbyte.api.client.model.generated.ScopeType +import io.airbyte.api.client.model.generated.SecretPersistenceConfig +import io.airbyte.api.client.model.generated.SecretPersistenceConfigGetRequestBody +import io.airbyte.commons.enums.Enums +import io.airbyte.commons.json.Jsons +import io.airbyte.config.ConfiguredAirbyteCatalog +import io.airbyte.config.ConfiguredMapper +import io.airbyte.config.MapperConfig +import io.airbyte.config.secrets.SecretsHelpers +import io.airbyte.config.secrets.SecretsRepositoryReader +import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence +import io.airbyte.mappers.transformations.Mapper +import io.airbyte.mappers.transformations.MapperSpec +import jakarta.inject.Singleton +import java.util.UUID + +@Singleton +class MapperSecretHydrationHelper( + private val mappers: List>, + private val secretsRepositoryReader: SecretsRepositoryReader, + private val airbyteApiClient: AirbyteApiClient, +) { + private fun getMapper(name: String): Mapper { + return mappers.first { it.name == name } + } + + private fun specHasSecrets(spec: JsonNode): Boolean { + return SecretsHelpers.getSortedSecretPaths(spec).isNotEmpty() + } + + private fun getConfigSchema(mapperSpec: MapperSpec<*>): JsonNode { + val mapperSpecSchema = mapperSpec.jsonSchema() + if (!mapperSpecSchema.has("properties") || !mapperSpecSchema.get("properties").has("config")) { + throw IllegalStateException("Mapper spec schema does not have a config property") + } + return mapperSpecSchema.get("properties").get("config") + } + + 
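// Illustrative example, assuming the standard airbyte_secret convention used by SecretsHelpers:
+  // a mapper config schema along the lines of
+  //   {"properties": {"api_key": {"type": "string", "airbyte_secret": true}}}
+  // would be picked up by specHasSecrets() above, while a mapper whose config schema declares
+  // no airbyte_secret fields is returned unchanged by hydrateMapperConfigSecrets() below.
+
+ 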
private fun getRuntimeSecretPersistence(organizationId: UUID): RuntimeSecretPersistence { + val secretPersistenceConfig: SecretPersistenceConfig = + airbyteApiClient.secretPersistenceConfigApi.getSecretsPersistenceConfig( + SecretPersistenceConfigGetRequestBody(ScopeType.ORGANIZATION, organizationId), + ) + return RuntimeSecretPersistence( + io.airbyte.config.SecretPersistenceConfig().withScopeType( + Enums.convertTo(secretPersistenceConfig.scopeType, io.airbyte.config.ScopeType::class.java), + ) + .withScopeId(secretPersistenceConfig.scopeId) + .withConfiguration(Jsons.deserializeToStringMap(secretPersistenceConfig.configuration)) + .withSecretPersistenceType( + Enums.convertTo( + secretPersistenceConfig.secretPersistenceType, + io.airbyte.config.SecretPersistenceConfig.SecretPersistenceType::class.java, + ), + ), + ) + } + + private fun hydrateMapperConfigSecrets( + mapperConfig: MapperConfig, + organizationId: UUID?, + useRuntimePersistence: Boolean, + ): MapperConfig { + val mapperName = mapperConfig.name() + val mapperInstance = getMapper(mapperName) + val mapperConfigSchema = getConfigSchema(mapperInstance.spec()) + + if (!specHasSecrets(mapperConfigSchema)) { + // Nothing to do, no secrets in spec + return mapperConfig + } + + val configAsJson = Jsons.jsonNode(mapperConfig.config()) + + val hydratedConfigJson = + if (useRuntimePersistence && organizationId != null) { + val secretPersistence = getRuntimeSecretPersistence(organizationId) + secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence( + configAsJson, + secretPersistence, + ) + } else { + secretsRepositoryReader.hydrateConfigFromDefaultSecretPersistence(configAsJson) + } + + val hydratedConfig: MapperConfig = mapperInstance.spec().deserialize(ConfiguredMapper(mapperName, hydratedConfigJson!!)) + return hydratedConfig + } + + /** + * Given a catalog with mapper configurations, hydrate the secrets in the configurations and return the hydrated catalog. + */ + fun hydrateMapperSecrets( + catalog: ConfiguredAirbyteCatalog, + useRuntimePersistence: Boolean, + organizationId: UUID?, + ): ConfiguredAirbyteCatalog { + return catalog.copy( + streams = + catalog.streams.map { stream -> + stream.copy( + mappers = + stream.mappers.map { + hydrateMapperConfigSecrets(it, organizationId, useRuntimePersistence) + }, + ) + }, + ) + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/input/ReplicationInputMapper.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/input/ReplicationInputMapper.kt index 8b1232642aa..e2e825713a9 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/input/ReplicationInputMapper.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/input/ReplicationInputMapper.kt @@ -44,5 +44,6 @@ class ReplicationInputMapper { .withDestinationConfiguration(replicationActivityInput.destinationConfiguration) .withConnectionContext(replicationActivityInput.connectionContext) .withUseFileTransfer(useFileTransfer) + .withNetworkSecurityTokens(replicationActivityInput.networkSecurityTokens) } } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/CheckConnectionInput.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/CheckConnectionInput.kt new file mode 100644 index 00000000000..81a1efe8aac --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/CheckConnectionInput.kt @@ -0,0 +1,17 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.workers.models + +import io.airbyte.config.StandardCheckConnectionInput +import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig + +/** + * CheckConnectionInput. + */ +data class CheckConnectionInput( + var jobRunConfig: JobRunConfig, + var launcherConfig: IntegrationLauncherConfig, + var checkConnectionInput: StandardCheckConnectionInput, +) diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/DiscoverCatalogInput.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/DiscoverCatalogInput.kt new file mode 100644 index 00000000000..8cb99959440 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/DiscoverCatalogInput.kt @@ -0,0 +1,14 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.models + +import io.airbyte.config.StandardDiscoverCatalogInput +import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig + +data class DiscoverCatalogInput( + var jobRunConfig: JobRunConfig, + var launcherConfig: IntegrationLauncherConfig, + var discoverCatalogInput: StandardDiscoverCatalogInput, +) diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/GeneratedJobInput.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/GeneratedJobInput.kt new file mode 100644 index 00000000000..4aa00cbbda0 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/GeneratedJobInput.kt @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.workers.models + +import io.airbyte.config.StandardSyncInput +import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig + +/** + * Generated job input. + */ +data class GeneratedJobInput( + private var jobRunConfig: JobRunConfig? = null, + private var sourceLauncherConfig: IntegrationLauncherConfig? = null, + private var destinationLauncherConfig: IntegrationLauncherConfig? = null, + private var syncInput: StandardSyncInput? = null, +) diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogInput.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/PostprocessCatalogInput.kt similarity index 68% rename from airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogInput.kt rename to airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/PostprocessCatalogInput.kt index 9577d1e87ae..33a36b3fa78 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/models/PostprocessCatalogInput.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/PostprocessCatalogInput.kt @@ -2,4 +2,6 @@ package io.airbyte.workers.models import java.util.UUID -data class PostprocessCatalogInput(val catalogId: UUID?, val connectionId: UUID?) +data class PostprocessCatalogInput(val catalogId: UUID?, val connectionId: UUID?) 
{ + constructor() : this(null, null) +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/PostprocessCatalogOutput.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/PostprocessCatalogOutput.kt new file mode 100644 index 00000000000..717fbab239b --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/PostprocessCatalogOutput.kt @@ -0,0 +1,38 @@ +package io.airbyte.workers.models + +import com.fasterxml.jackson.annotation.JsonIgnore +import com.fasterxml.jackson.annotation.JsonIgnoreProperties +import com.fasterxml.jackson.databind.annotation.JsonDeserialize +import io.airbyte.config.CatalogDiff + +/** + * A very basic discriminated union of a successful catalog postprocess and an error. Allows bypassing + * extraneous exception wrapping / propagation. Written naively to allow interop with Java. + */ +@JsonDeserialize(builder = PostprocessCatalogOutput.Builder::class) +data class PostprocessCatalogOutput private constructor(val diff: CatalogDiff?, val message: String?, val stackTrace: String?) { + @JsonIgnoreProperties(ignoreUnknown = true) + class Builder + @JvmOverloads + constructor(var diff: CatalogDiff? = null, var message: String? = null, var stackTrace: String? = null) { + fun diff(diff: CatalogDiff) = apply { this.diff = diff } + + fun message(message: String) = apply { this.message = message } + + fun stackTrace(stackTrace: String) = apply { this.stackTrace = stackTrace } + + fun build() = PostprocessCatalogOutput(diff, message, stackTrace) + } + + @JsonIgnore + val isSuccess = message == null && stackTrace == null + + @JsonIgnore + val isFailure = !isSuccess + + companion object { + fun success(diff: CatalogDiff?): PostprocessCatalogOutput = PostprocessCatalogOutput(diff, null, null) + + fun failure(t: Throwable): PostprocessCatalogOutput = PostprocessCatalogOutput(null, t.message, t.stackTraceToString()) + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/SpecInput.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/SpecInput.kt new file mode 100644 index 00000000000..b4e7d4cdad5 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/models/SpecInput.kt @@ -0,0 +1,12 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.workers.models + +import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig + +data class SpecInput( + var jobRunConfig: JobRunConfig, + var launcherConfig: IntegrationLauncherConfig, +) diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/pod/PodLabeler.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/pod/PodLabeler.kt index e480390e4b9..c47068508c2 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/pod/PodLabeler.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/pod/PodLabeler.kt @@ -25,7 +25,7 @@ import jakarta.inject.Singleton import java.util.UUID @Singleton -class PodLabeler { +class PodLabeler(private val podNetworkSecurityLabeler: PodNetworkSecurityLabeler) { fun getSourceLabels(): Map<String, String> { return mapOf( SYNC_STEP_KEY to READ_STEP, @@ -113,12 +113,15 @@ class PodLabeler { mutexKey: String?, passThroughLabels: Map<String, String>, autoId: UUID, + workspaceId: UUID?, + networkSecurityTokens: List<String>, ): Map<String, String> { return passThroughLabels + getMutexLabels(mutexKey) + getWorkloadLabels(workloadId) + getAutoIdLabels(autoId) + - getPodSweeperLabels() + getPodSweeperLabels() + + podNetworkSecurityLabeler.getLabels(workspaceId, networkSecurityTokens) } fun getReplicationImageLabels( diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/pod/PodNetworkSecurityLabeler.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/pod/PodNetworkSecurityLabeler.kt new file mode 100644 index 00000000000..57c600b9398 --- /dev/null +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/pod/PodNetworkSecurityLabeler.kt @@ -0,0 +1,91 @@ +package io.airbyte.workers.pod + +import io.airbyte.workers.hashing.Hasher +import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicy +import io.fabric8.kubernetes.client.KubernetesClient +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.cache.CacheManager +import io.micronaut.context.annotation.Requires +import jakarta.inject.Singleton +import java.util.UUID + +private val logger = KotlinLogging.logger {} + +/** + * Class which lets us get the network security labels for pods.
+ * Tokens will be on the networkPolicies themselves in the format: + * airbyte/networkSecurityTokenHash: sha256(token, salt) + * + * These will be cached to avoid hitting the kube API every single time (should change infrequently) + */ +@Singleton +class PodNetworkSecurityLabeler( + private val networkPolicyFetcher: NetworkPolicyFetcher?, + cacheManager: CacheManager, + private val hasher: Hasher, +) { + private val cache = cacheManager.getCache("network-security-labels") + + fun getLabels( + workspaceId: UUID?, + networkSecurityTokens: List<String>, + ): Map<String, String> { + return workspaceId?.let { + networkPolicyFetcher?.let { + if (networkSecurityTokens.isEmpty()) { + // Short circuit if we have no tokens to fetch policies for + return emptyMap() + } + try { + val cachedLabels = cache.get(workspaceId, Map::class.java) + if (cachedLabels.isPresent && cachedLabels.get().isNotEmpty()) { + return cachedLabels.get() as Map<String, String> + } + val matchingNetworkPolicies = networkPolicyFetcher.matchingNetworkPolicies(workspaceId, networkSecurityTokens, hasher) + + val labels = flatten(matchingNetworkPolicies.map { it.spec.podSelector.matchLabels }) + cache.put(workspaceId, labels) + return labels + } catch (e: Exception) { + logger.error(e) { "Failed to get network security labels for workspace $workspaceId" } + return emptyMap() + } + } ?: run { + logger.debug { "NetworkPolicyFetcher is null, skipping network security labels" } + emptyMap() + } + } ?: run { + logger.debug { "Workspace ID is null, skipping network security labels" } + emptyMap() + } + } + + private fun flatten(list: List<Map<String, String>>): Map<String, String> = + mutableMapOf<String, String>().apply { + for (innerMap in list) putAll(innerMap) + } +} + +@Singleton +@Requires(property = "airbyte.workload-launcher.network-policy-introspection", value = "true") +class NetworkPolicyFetcher( + private val kubernetesClient: KubernetesClient, +) { + private val tokenHashKey = "airbyte/networkSecurityTokenHash" + private val salt = "airbyte.network.security.token" + private val workspaceIdLabelKey = "airbyte/workspaceId" + + fun matchingNetworkPolicies( + workspaceId: UUID, + networkSecurityTokens: List<String>, + hasher: Hasher, + ): List<NetworkPolicy> { + // Need to truncate the token hash because Kubernetes label values can be at most 63 characters + val hashedTokens = networkSecurityTokens.map { hasher.hash(it, salt).slice(IntRange(0, 49)) } + return kubernetesClient + .network() + .networkPolicies() + .inNamespace("jobs") + .withLabelIn(tokenHashKey, *hashedTokens.toTypedArray()) + .withLabel(workspaceIdLabelKey, workspaceId.toString()) + .list() + .items + } +} diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/OrchestratorConstants.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/OrchestratorConstants.kt index bb2e6a32acb..a4e0ffcf933 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/OrchestratorConstants.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/OrchestratorConstants.kt @@ -34,6 +34,7 @@ object OrchestratorConstants { EnvVar.STORAGE_BUCKET_LOG, EnvVar.STORAGE_BUCKET_STATE, EnvVar.STORAGE_BUCKET_WORKLOAD_OUTPUT, + EnvVar.STORAGE_BUCKET_AUDIT_LOGGING, EnvVar.STORAGE_TYPE, EnvVar.WORKSPACE_ROOT, ).map { it.name }, diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java index b5d072a95da..b978f3233ac 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java +++ 
b/airbyte-commons-worker/src/test/java/io/airbyte/workers/ReplicationInputHydratorTest.java @@ -8,6 +8,7 @@ import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -65,6 +66,7 @@ import io.airbyte.workers.exception.WorkerException; import io.airbyte.workers.helper.BackfillHelper; import io.airbyte.workers.helper.CatalogDiffConverter; +import io.airbyte.workers.helper.MapperSecretHydrationHelper; import io.airbyte.workers.helper.ResumableFullRefreshStatsHelper; import io.airbyte.workers.input.ReplicationInputMapper; import io.airbyte.workers.models.RefreshSchemaActivityOutput; @@ -189,6 +191,7 @@ class ReplicationInputHydratorTest { null); private static SecretsRepositoryReader secretsRepositoryReader; + private static MapperSecretHydrationHelper mapperSecretHydrationHelper; private static AirbyteApiClient airbyteApiClient; private static ConnectionApi connectionApi; private static StateApi stateApi; @@ -205,6 +208,7 @@ class ReplicationInputHydratorTest { @BeforeEach void setup() throws IOException { secretsRepositoryReader = mock(SecretsRepositoryReader.class); + mapperSecretHydrationHelper = mock(MapperSecretHydrationHelper.class); airbyteApiClient = mock(AirbyteApiClient.class); attemptApi = mock(AttemptApi.class); connectionApi = mock(ConnectionApi.class); @@ -228,6 +232,7 @@ void setup() throws IOException { when(airbyteApiClient.getActorDefinitionVersionApi()).thenReturn(actorDefinitionVersionApi); when(airbyteApiClient.getDestinationApi()).thenReturn(destinationApi); when(stateApi.getState(new ConnectionIdRequestBody(CONNECTION_ID))).thenReturn(CONNECTION_STATE_RESPONSE); + when(mapperSecretHydrationHelper.hydrateMapperSecrets(any(), anyBoolean(), any())).thenAnswer(invocation -> invocation.getArgument(0)); } private ReplicationInputHydrator getReplicationInputHydrator() { @@ -235,6 +240,7 @@ private ReplicationInputHydrator getReplicationInputHydrator() { airbyteApiClient, resumableFullRefreshStatsHelper, secretsRepositoryReader, + mapperSecretHydrationHelper, backfillHelper, catalogClientConverters, new ReplicationInputMapper(), @@ -261,7 +267,8 @@ private ReplicationActivityInput getDefaultReplicationActivityInputForTest() { "unused", null, // unused new ConnectionContext().withOrganizationId(UUID.randomUUID()), - null); + null, + Collections.emptyList()); } @ParameterizedTest @@ -294,7 +301,7 @@ void testGenerateReplicationInputHandlesResets(final boolean withRefresh) throws // Verify that if the sync is a reset, we retrieve the job info and handle the streams accordingly. 
final ReplicationInputHydrator replicationInputHydrator = getReplicationInputHydrator(); final ReplicationActivityInput input = getDefaultReplicationActivityInputForTest(); - input.setIsReset(true); + input.setReset(true); when(jobsApi.getLastReplicationJob(new ConnectionIdRequestBody(CONNECTION_ID))).thenReturn( new JobOptionalRead(new JobRead( JOB_ID, diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java index b5f5c3e9ca8..065263c2110 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerHelperTest.java @@ -315,7 +315,7 @@ void testSupportRefreshesIsPassed(final boolean supportRefreshes) throws Excepti @Test void testApplyTransformationNoMapper() throws IOException { mockSupportRefreshes(false); - ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); + final ConfiguredAirbyteCatalog catalog = mock(ConfiguredAirbyteCatalog.class); when(destinationCatalogGenerator.generateDestinationCatalog(any())) .thenReturn(new DestinationCatalogGenerator.CatalogGenerationResult(catalog, Map.of())); // Need to pass in a replication context @@ -356,6 +356,12 @@ public String documentationUrl() { return null; } + @Nullable + @Override + public UUID id() { + return null; + } + @NotNull @Override public Object config() { diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/EmptyAirbyteSourceTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/EmptyAirbyteSourceTest.java index 348f229d069..65e44124c43 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/EmptyAirbyteSourceTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/EmptyAirbyteSourceTest.java @@ -48,7 +48,7 @@ class EmptyAirbyteSourceTest { @BeforeEach void init() { - emptyAirbyteSource = new EmptyAirbyteSource(); + emptyAirbyteSource = new EmptyAirbyteSource(false); } @Test @@ -256,7 +256,33 @@ void testPerStream() throws Exception { emptyAirbyteSource.start(workerSourceConfig, null, null); - streamsToReset.forEach(this::testReceiveExpectedPerStreamMessages); + streamsToReset.forEach(s -> testReceiveExpectedPerStreamMessages(s, true)); + + Assertions.assertThat(emptyAirbyteSource.attemptRead()) + .isEmpty(); + + Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); + } + + @Test + void testPerStreamCustomFormat() throws Exception { + emptyAirbyteSource = new EmptyAirbyteSource(true); + + final List streamDescriptors = getProtocolStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); + + final List streamsToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); + + final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() + .withStreamsToReset(streamsToReset); + final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() + .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) + .withState(new State() + .withState(Jsons.jsonNode(createPerStreamState(streamDescriptors)))) + .withCatalog(AIRBYTE_CATALOG); + + emptyAirbyteSource.start(workerSourceConfig, null, null); + + streamsToReset.forEach(s -> testReceiveExpectedPerStreamMessages(s, false)); Assertions.assertThat(emptyAirbyteSource.attemptRead()) .isEmpty(); @@ -281,7 +307,7 @@ void 
testPerStreamWithExtraState() throws Exception { emptyAirbyteSource.start(workerSourceConfig, null, null); - streamsToReset.forEach(this::testReceiveExpectedPerStreamMessages); + streamsToReset.forEach(s -> testReceiveExpectedPerStreamMessages(s, true)); Assertions.assertThat(emptyAirbyteSource.attemptRead()) .isEmpty(); @@ -307,7 +333,7 @@ void testPerStreamWithMissingState() throws Exception { emptyAirbyteSource.start(workerSourceConfig, null, null); - streamsToReset.forEach(this::testReceiveExpectedPerStreamMessages); + streamsToReset.forEach(s -> testReceiveExpectedPerStreamMessages(s, true)); Assertions.assertThat(emptyAirbyteSource.attemptRead()) .isEmpty(); @@ -423,6 +449,44 @@ void testLegacyWithNewConfig() throws Exception { Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); } + @Test + void testLegacyWithNewConfigWithCustomFormat() throws Exception { + emptyAirbyteSource = new EmptyAirbyteSource(true); + final List streamsToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); + + final ResetSourceConfiguration resetSourceConfiguration = new ResetSourceConfiguration() + .withStreamsToReset(streamsToReset); + final ConfiguredAirbyteCatalog airbyteCatalog = new ConfiguredAirbyteCatalog() + .withStreams(Lists.newArrayList( + new ConfiguredAirbyteStream(getAirbyteStream("a"), SyncMode.INCREMENTAL, DestinationSyncMode.APPEND), + new ConfiguredAirbyteStream(getAirbyteStream("b"), SyncMode.INCREMENTAL, DestinationSyncMode.APPEND), + new ConfiguredAirbyteStream(getAirbyteStream("c"), SyncMode.INCREMENTAL, DestinationSyncMode.APPEND))); + + final WorkerSourceConfig workerSourceConfig = new WorkerSourceConfig() + .withSourceConnectionConfiguration(Jsons.jsonNode(resetSourceConfiguration)) + .withState(new State() + .withState(Jsons.jsonNode(Collections.singletonMap("cursor", "1")))) + .withCatalog(airbyteCatalog); + + emptyAirbyteSource.start(workerSourceConfig, null, null); + + final Optional maybeMessage = emptyAirbyteSource.attemptRead(); + Assertions.assertThat(maybeMessage) + .isNotEmpty(); + + final AirbyteMessage message = maybeMessage.get(); + Assertions.assertThat(message.getType()).isEqualTo(Type.STATE); + + final AirbyteStateMessage stateMessage = message.getState(); + Assertions.assertThat(stateMessage.getType()).isEqualTo(AirbyteStateType.LEGACY); + Assertions.assertThat(stateMessage.getData()).isEqualTo(Jsons.emptyObject()); + + Assertions.assertThat(emptyAirbyteSource.attemptRead()) + .isEmpty(); + + Assertions.assertThat(emptyAirbyteSource.isFinished()).isTrue(); + } + @Test void testLegacyWithNullState() throws Exception { final List streamsToReset = getConfigStreamDescriptorFromName(Lists.newArrayList("a", "b", "c")); @@ -482,10 +546,14 @@ private void testReceiveNullStreamStateMessage(final StreamDescriptor streamDesc Assertions.assertThat(stateMessage.getStream().getStreamState()).isNull(); } - private void testReceiveExpectedPerStreamMessages(final StreamDescriptor s) { - testReceiveResetStatusMessage(s, AirbyteStreamStatus.STARTED); + private void testReceiveExpectedPerStreamMessages(final StreamDescriptor s, final boolean includeStatus) { + if (includeStatus) { + testReceiveResetStatusMessage(s, AirbyteStreamStatus.STARTED); + } testReceiveNullStreamStateMessage(s); - testReceiveResetStatusMessage(s, AirbyteStreamStatus.COMPLETE); + if (includeStatus) { + testReceiveResetStatusMessage(s, AirbyteStreamStatus.COMPLETE); + } } private void testReceiveResetMessageTupleForSingleStateTypes(final StreamDescriptor s) { diff --git 
a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt index 6e2b61015e7..2922aa25c21 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/sync/WorkloadApiWorkerTest.kt @@ -40,6 +40,9 @@ import java.util.Optional import java.util.UUID import java.util.concurrent.CancellationException +private const val JOB_ID = 13L +private const val ATTEMPT_NUMBER = 37 + internal class WorkloadApiWorkerTest { private var workloadIdGenerator: WorkloadIdGenerator = mockk() private var storageClient: StorageClient = mockk() @@ -60,10 +63,32 @@ internal class WorkloadApiWorkerTest { every { apiClient.connectionApi } returns connectionApi every { workloadApiClient.workloadApi } returns workloadApi every { logClientManager.fullLogPath(any()) } answers { Path.of(invocation.args.first().toString(), DEFAULT_LOG_FILENAME).toString() } + featureFlagClient = TestClient() jobRoot = Path.of("test", "path") - replicationActivityInput = ReplicationActivityInput() - replicationInput = ReplicationInput() + + val workspaceId = UUID.randomUUID() + val connectionId = UUID.randomUUID() + val sourceId = UUID.randomUUID() + val destinationId = UUID.randomUUID() + val jobRunConfig = JobRunConfig().withJobId(JOB_ID.toString()).withAttemptId(ATTEMPT_NUMBER.toLong()) + + replicationActivityInput = + ReplicationActivityInput( + workspaceId = workspaceId, + connectionId = connectionId, + sourceId = sourceId, + destinationId = destinationId, + jobRunConfig = jobRunConfig, + ) + replicationInput = + ReplicationInput().apply { + this.workspaceId = workspaceId + this.connectionId = connectionId + this.jobRunConfig = jobRunConfig + this.signalInput = "signalInputValue" + } + workloadApiWorker = WorkloadApiWorker( jobOutputDocStore, @@ -85,7 +110,6 @@ internal class WorkloadApiWorkerTest { val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) - initializeReplicationInput(jobId, attemptNumber) every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, jobId, attemptNumber) } returns workloadId @@ -113,15 +137,12 @@ internal class WorkloadApiWorkerTest { @Test fun testFailedReplicationWithOutput() { - val jobId = 13L - val attemptNumber = 37 val workloadId = "my-workload" val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) - initializeReplicationInput(jobId, attemptNumber) - every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, jobId, attemptNumber) } returns workloadId + every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, JOB_ID, ATTEMPT_NUMBER) } returns workloadId every { connectionApi.getConnection(any()) @@ -148,15 +169,12 @@ internal class WorkloadApiWorkerTest { @Test fun testResumeReplicationThatAlreadyStarted() { - val jobId = 313L - val attemptNumber = 37 val workloadId = "my-workload" val expectedOutput = ReplicationOutput() .withReplicationAttemptSummary(ReplicationAttemptSummary().withStatus(StandardSyncSummary.ReplicationStatus.COMPLETED)) - initializeReplicationInput(jobId, attemptNumber) - every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, 
jobId, attemptNumber) } returns workloadId + every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, JOB_ID, ATTEMPT_NUMBER) } returns workloadId every { connectionApi.getConnection(any()) @@ -182,13 +200,10 @@ internal class WorkloadApiWorkerTest { @Test fun testReplicationWithMissingOutput() { - val jobId = 42L - val attemptNumber = 1 val workloadId = "my-failed-workload" - val expectedDocPrefix = "testNs/orchestrator-repl-job-$jobId-attempt-$attemptNumber" - initializeReplicationInput(jobId, attemptNumber) + val expectedDocPrefix = "testNs/orchestrator-repl-job-$JOB_ID-attempt-$ATTEMPT_NUMBER" - every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, jobId, attemptNumber) } returns workloadId + every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, JOB_ID, ATTEMPT_NUMBER) } returns workloadId every { connectionApi.getConnection(any()) @@ -212,12 +227,9 @@ internal class WorkloadApiWorkerTest { @Test fun testCancelledReplication() { - val jobId = 42L - val attemptNumber = 1 val workloadId = "my-failed-workload" - initializeReplicationInput(jobId, attemptNumber) - every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, jobId, attemptNumber) } returns workloadId + every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, JOB_ID, ATTEMPT_NUMBER) } returns workloadId every { connectionApi.getConnection(any()) @@ -245,12 +257,9 @@ internal class WorkloadApiWorkerTest { @Test fun testFailedReplicationWithPlatformFailure() { - val jobId = 42L - val attemptNumber = 1 val workloadId = "my-failed-workload" - initializeReplicationInput(jobId, attemptNumber) - every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, jobId, attemptNumber) } returns workloadId + every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, JOB_ID, ATTEMPT_NUMBER) } returns workloadId every { connectionApi.getConnection(any()) @@ -278,12 +287,9 @@ internal class WorkloadApiWorkerTest { @Test fun testFailedReplicationWithSourceFailure() { - val jobId = 43L - val attemptNumber = 1 val workloadId = "my-failed-workload" - initializeReplicationInput(jobId, attemptNumber) - every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, jobId, attemptNumber) } returns workloadId + every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, JOB_ID, ATTEMPT_NUMBER) } returns workloadId every { connectionApi.getConnection(any()) @@ -311,12 +317,9 @@ internal class WorkloadApiWorkerTest { @Test fun testFailedReplicationWithDestinationFailure() { - val jobId = 44L - val attemptNumber = 1 val workloadId = "my-failed-workload" - initializeReplicationInput(jobId, attemptNumber) - every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, jobId, attemptNumber) } returns workloadId + every { workloadIdGenerator.generateSyncWorkloadId(replicationInput.connectionId, JOB_ID, ATTEMPT_NUMBER) } returns workloadId every { connectionApi.getConnection(any()) @@ -342,32 +345,6 @@ internal class WorkloadApiWorkerTest { assertThrows { workloadApiWorker.run(replicationInput, jobRoot) } } - private fun initializeReplicationInput( - jobId: Long, - attemptNumber: Int, - ) { - val workspaceId = UUID.randomUUID() - val connectionId = UUID.randomUUID() - val sourceId = UUID.randomUUID() - val destinationId = UUID.randomUUID() - val jobRunConfig = 
JobRunConfig().withJobId(jobId.toString()).withAttemptId(attemptNumber.toLong()) - - replicationInput.apply { - this.workspaceId = workspaceId - this.connectionId = connectionId - this.jobRunConfig = jobRunConfig - this.signalInput = "signalInputValue" - } - - replicationActivityInput.apply { - this.workspaceId = workspaceId - this.connectionId = connectionId - this.sourceId = sourceId - this.destinationId = destinationId - this.jobRunConfig = jobRunConfig - } - } - private fun mockWorkload( status: WorkloadStatus, terminationSource: String? = null, diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfDestinationFactory.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfDestinationFactory.java deleted file mode 100644 index dfcd12a2e05..00000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfDestinationFactory.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.perf; - -import static io.airbyte.workers.perf.PerfFactory.sleep; - -import io.airbyte.config.WorkerDestinationConfig; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.workers.internal.AirbyteDestination; -import io.airbyte.workers.perf.PerfFactory.DestinationConfig; -import java.nio.file.Path; -import java.util.Optional; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.TimeUnit; - -@SuppressWarnings("PMD.UnusedPrivateField") -class PerfDestinationFactory { - - record PerfAirbyteDestination(DestinationHelper helper) implements AirbyteDestination { - - @Override - public void start(WorkerDestinationConfig destinationConfig, Path jobRoot) throws Exception { - - } - - @Override - public void accept(AirbyteMessage message) throws Exception { - helper.accept(message); - } - - @Override - public void notifyEndOfInput() throws Exception { - - } - - @Override - public boolean isFinished() { - return helper.isFinished(); - } - - @Override - public int getExitValue() { - return helper.exitValue(); - } - - @Override - public Optional attemptRead() { - return Optional.ofNullable(helper.nextRead()); - } - - @Override - public void close() throws Exception { - - } - - @Override - public void cancel() throws Exception { - - } - - } - - /** - * Helper class to contain all the complexities supported by the DestinationConfig. 
- */ - static class DestinationHelper { - - private final DestinationConfig config; - private int acceptNumRecords; - private int acceptNumLogs; - private int acceptNumStates; - - private boolean initReadWait; - private boolean initAcceptWait; - private boolean finished; - - private final BlockingQueue messages = new LinkedBlockingQueue<>(); - - DestinationHelper(final DestinationConfig config) { - this.config = config; - } - - AirbyteMessage nextRead() { - if (finished) { - return null; - } - - if (!initReadWait) { - initReadWait = true; - sleep(config.readInitialWait()); - } - - final AirbyteMessage message; - try { - message = messages.poll(config.readBatchWait().toSeconds(), TimeUnit.SECONDS); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - - return message; - } - - void accept(AirbyteMessage message) { - if (finished || message == null) { - return; - } - - if (!initAcceptWait) { - initAcceptWait = true; - sleep(config.acceptInitialWait()); - } - - if (config.acceptBatchSize() > 0 && acceptNumRecords % config.acceptBatchSize() == 0) { - sleep(config.acceptBatchWait()); - } - - try { - switch (message.getType()) { - case STATE -> { - acceptNumStates++; - if (config.echoState()) { - messages.put(message); - } - } - case LOG -> { - acceptNumLogs++; - if (config.echoLog()) { - messages.put(message); - } - } - case RECORD -> acceptNumRecords++; - default -> { - // NOOP - } - } - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - - acceptNumRecords++; - - if (config.acceptBatchSize() > 0 && acceptNumRecords % config.acceptBatchSize() == 0) { - sleep(config.acceptBatchWait()); - } - - if (acceptNumRecords == config.acceptNumRecords()) { - finished = true; - } - } - - boolean isFinished() { - return finished; - } - - int exitValue() { - return config.exitValue(); - } - - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfFactory.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfFactory.java deleted file mode 100644 index 2a078de9f42..00000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfFactory.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.perf; - -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.workers.internal.AirbyteDestination; -import io.airbyte.workers.internal.AirbyteSource; -import io.airbyte.workers.perf.PerfDestinationFactory.DestinationHelper; -import io.airbyte.workers.perf.PerfDestinationFactory.PerfAirbyteDestination; -import io.airbyte.workers.perf.PerfSourceFactory.PerfAirbyteSource; -import io.airbyte.workers.perf.PerfSourceFactory.SourceHelper; -import java.time.Duration; -import java.util.List; -import java.util.concurrent.TimeUnit; -import lombok.Builder; - -/** - * Factory for creating performance focused sources and destinations. - */ -public class PerfFactory { - - /** - * Returns a well-defined AirbyteSource for testing purposes. - * - * @param config Configuration that tells this source how to behave. - * @return AirbyteSource based on the provided configuration. - */ - public static AirbyteSource createSource(final SourceConfig config) { - return new PerfAirbyteSource(new SourceHelper(config)); - } - - /** - * Returns a well-defined AirbyteDestination for testing purposes. - * - * @param config Configuration that tells this destination how to behave. - * @return AirbyteDestination based on the provided configuration. 
- */ - public static AirbyteDestination createDestination(final DestinationConfig config) { - return new PerfAirbyteDestination(new DestinationHelper(config)); - } - - /** - * Returns a DestinationConfigBuilder class to configure the DestinationConfig. - * - * @return DestinationConfigBuilder - */ - public static DestinationConfig.DestinationConfigBuilder createDestinationConfigBuilder() { - return DestinationConfig.builder(); - } - - /** - * Returns a SourceConfigBuilder class to configure the SourceConfig. - * - * @return SourceConfigBuilder - */ - public static SourceConfig.SourceConfigBuilder createSourceConfigBuilder() { - return SourceConfig.builder(); - } - - /** - * Configuration options for creating a perf destination. - *
<p>
- * Destinations both accept (via accept) and return (via attemptRead) messages. - * - * @param readInitialWait How long to block on the initial attemptRead call. - * @param echoState Should this destination echo state messages? I.e. should every state message - * received in the accept call also be returned via the attemptRead call. - * @param echoLog Should this destination echo log messages? I.e. should every log message received - * in the accept call also be returned via the attemptRead call. - * @param acceptBatchSize After how many messages should the accept call block? Set to 0 to disable - * this behavior. - * @param acceptNumRecords How many records should this destination accept before it marks itself as - * finished? - * @param readBatchSize After how many messages should the attemptRead call block? Set to 0 to - * disable this behavior. - * @param readBatchWait After readBatchSize messages, block for this amount of time. - * @param acceptInitialWait How long to block on the initial accept call. - * @param acceptBatchWait After acceptBatchSize messages, block for this amount of time. - * @param acceptStateWait TODO - not implemented yet as I'm not sure if it's useful. - * @param acceptLogWait TODO - not implemented yet as I'm not sure if it's useful. - * @param exitValue What value the exitValue method should return. - */ - @Builder - record DestinationConfig( - Duration readInitialWait, - boolean echoState, - boolean echoLog, - int acceptBatchSize, - int acceptNumRecords, - int readBatchSize, - Duration readBatchWait, - Duration acceptInitialWait, - Duration acceptBatchWait, - Duration acceptStateWait, - Duration acceptLogWait, - int exitValue) { - - DestinationConfig { - if (readInitialWait == null) { - readInitialWait = Duration.ZERO; - } - if (readBatchWait == null) { - readBatchWait = Duration.ZERO; - } - if (acceptInitialWait == null) { - acceptInitialWait = Duration.ZERO; - } - if (acceptBatchWait == null) { - acceptBatchWait = Duration.ZERO; - } - } - - } - - /** - * Configuration options for creating a perf source. - *
<p>
- * Sources only return messages via the attemptRead method. - * - * @param readRecords Records that will be returned from the attemptRead call. The attemptRead - * returns only a single record at a time, this list will be looped over until the source is - * finished. - * @param readLogs Logs that will be returned from the attemptRead call. The attemptRead returns - * only a single record at a time, this * list will be looped over until the source is - * finished. - * @param readNumRecords How many records should be returned from attemptRead before this source is - * marked as finished. - * @param readBatchSize After how many messages should the attemptRead call block? Set to 0 to - * disable this behavior. - * @param readInitialWait How long to block on the initial attemptRead call. - * @param readBatchWait After readBatchSize messages, block for this amount of time. - * @param readLogEvery Have attemptRead return a log message after this many record messages - * returned. - * @param readStateEvery Have attemptRead return a state message after this many record messages - * returned. - * @param exitValue What value the exitValue method should return. - */ - @Builder - record SourceConfig( - List readRecords, - List readLogs, - int readNumRecords, - int readBatchSize, - Duration readInitialWait, - Duration readBatchWait, - int readLogEvery, - int readStateEvery, - int exitValue) { - - SourceConfig { - if (readNumRecords > 0 && (readRecords == null || readRecords.isEmpty())) { - throw new IllegalArgumentException("cannot specify readNumRecords without also specifying readRecords"); - } - if (readLogEvery > 0 && (readLogs == null || readLogs.isEmpty())) { - throw new IllegalArgumentException("cannot specify readLogsEvey without also specifying readLogs"); - } - if (readInitialWait == null) { - readInitialWait = Duration.ZERO; - } - if (readBatchWait == null) { - readBatchWait = Duration.ZERO; - } - } - - } - - /** - * Helper method that exists to aid in blocking for a given duration of time. - * - * @param duration how long to block for. - */ - static void sleep(Duration duration) { - if (duration.isZero()) { - return; - } - - final var sleepSeconds = duration.toSeconds(); - try { - TimeUnit.SECONDS.sleep(sleepSeconds); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - } - -} diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfSourceFactory.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfSourceFactory.java deleted file mode 100644 index fb6d9c85eda..00000000000 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/perf/PerfSourceFactory.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.perf; - -import static io.airbyte.workers.perf.PerfFactory.sleep; - -import io.airbyte.config.WorkerSourceConfig; -import io.airbyte.protocol.models.AirbyteMessage; -import io.airbyte.protocol.models.AirbyteMessage.Type; -import io.airbyte.workers.internal.AirbyteSource; -import io.airbyte.workers.perf.PerfFactory.SourceConfig; -import io.airbyte.workers.test_utils.AirbyteMessageUtils; -import java.nio.file.Path; -import java.util.Optional; -import java.util.UUID; - -@SuppressWarnings("PMD.UnusedPrivateField") -class PerfSourceFactory { - - record PerfAirbyteSource(SourceHelper helper) implements AirbyteSource { - - @Override - public void start(WorkerSourceConfig sourceConfig, Path jobRoot, UUID connectionId) throws Exception {} - - @Override - public boolean isFinished() { - return helper.isFinished(); - } - - @Override - public int getExitValue() { - return helper.exitValue(); - } - - @Override - public Optional attemptRead() { - return Optional.ofNullable(helper.nextRead()); - } - - @Override - public void close() throws Exception {} - - @Override - public void cancel() throws Exception {} - - } - - static class SourceHelper { - - private final SourceConfig config; - private int readNumRecords; - private int readNumLogs; - private int readNumStates; - private boolean initReadWait; - private boolean finished; - // ensure we don't get stuck in - private boolean lastMessageLog; - private boolean lastMessageState; - - SourceHelper(final SourceConfig config) { - this.config = config; - } - - AirbyteMessage nextRead() { - if (finished) { - return null; - } - - if (!initReadWait) { - initReadWait = true; - sleep(config.readInitialWait()); - } - - final AirbyteMessage message; - - // should log if the previous message wasn't a log, log-every is defined, and we're at the log-every - // number of messages - final var shouldLog = !lastMessageLog && config.readLogEvery() > 0 && readNumRecords % config.readLogEvery() == 0; - // should state if we're on the last record (the last message should always be a state) OR - // the previous message wasn't a state, state-every is defined, and we're on the state-every number - // of messages - final var shouldState = readNumRecords == config.readNumRecords() || (!lastMessageState && (config.readStateEvery() > 0 - && readNumRecords % config.readStateEvery() == 0)); - - if (shouldLog) { - message = config.readLogs().get((readNumLogs % config.readLogs().size()) - 1); - readNumLogs++; - lastMessageLog = true; - lastMessageState = false; - } else if (shouldState) { - final var stateStream = AirbyteMessageUtils.createStreamStateMessage("perf/stream-0", readNumRecords); - message = new AirbyteMessage().withType(Type.STATE).withState(stateStream); - readNumStates++; - lastMessageLog = false; - lastMessageState = true; - } else { - // typical message - message = config.readRecords().get((readNumRecords % config.readRecords().size()) - 1); - readNumRecords++; - if (config.readBatchSize() > 0 && readNumRecords % config.readBatchSize() == 0) { - sleep(config.readBatchWait()); - } - lastMessageLog = false; - lastMessageState = false; - } - - if (readNumRecords == config.readNumRecords()) { - finished = true; - } - - return message; - } - - boolean isFinished() { - return finished; - } - - int exitValue() { - return config.exitValue(); - } - - } - -} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/hashing/Sha256HasherTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/hashing/Sha256HasherTest.kt new 
file mode 100644 index 00000000000..40bf3abcfee --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/hashing/Sha256HasherTest.kt @@ -0,0 +1,50 @@ +package io.airbyte.workers.hashing + +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.MethodSource +import java.util.stream.Stream + +class Sha256HasherTest { + private val hasher = Sha256Hasher() + + @ParameterizedTest + @MethodSource("hashTestProvider") + fun test( + input: String, + expected: String, + ) { + assertEquals(expected, hasher.hash(input)) + } + + @ParameterizedTest + @MethodSource("saltedHashTestProvider") + fun testWithSalt( + input: String, + expected: String, + salt: String, + ) { + assertEquals(expected, hasher.hash(input, salt)) + } + + companion object { + @JvmStatic + fun hashTestProvider(): Stream { + return Stream.of( + Arguments.of("token1", "df3e6b0bb66ceaadca4f84cbc371fd66e04d20fe51fc414da8d1b84d31d178de"), + Arguments.of("asdfasdf", "2413fb3709b05939f04cf2e92f7d0897fc2596f9ad0b8a9ea855c7bfebaae892"), + Arguments.of("differentstring", "bccfc5cb1ca22da8a6912fc1462d3c52e8fa2b8d1313077c092f09b9ae61925b"), + ) + } + + @JvmStatic + fun saltedHashTestProvider(): Stream { + return Stream.of( + Arguments.of("token1", "fe1897ce53efcc5f79380957459cba5cc788f0713a605e71b57c5d3031f841a6", "randomsalt"), + Arguments.of("asdfasdf", "46f7080c240fe300c78b81631a9d127adabac1bbdd6556bf39fac2f62bda5bd0", "randomsalt"), + Arguments.of("differentstring", "4dba3dce7d92f669f1bc4384402a8a16e6ecd2fa4b228560e629b16fbc2a9ebd", "randomsalt"), + ) + } + } +} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/MapperSecretHydrationHelperTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/MapperSecretHydrationHelperTest.kt new file mode 100644 index 00000000000..232b22ca53a --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/helper/MapperSecretHydrationHelperTest.kt @@ -0,0 +1,135 @@ +package io.airbyte.workers.helper + +import io.airbyte.api.client.AirbyteApiClient +import io.airbyte.api.client.model.generated.ScopeType +import io.airbyte.api.client.model.generated.SecretPersistenceConfig +import io.airbyte.api.client.model.generated.SecretPersistenceType +import io.airbyte.commons.json.Jsons +import io.airbyte.config.AirbyteSecret +import io.airbyte.config.AirbyteStream +import io.airbyte.config.ConfiguredAirbyteCatalog +import io.airbyte.config.ConfiguredAirbyteStream +import io.airbyte.config.DestinationSyncMode +import io.airbyte.config.MapperConfig +import io.airbyte.config.SyncMode +import io.airbyte.config.mapper.configs.AesEncryptionConfig +import io.airbyte.config.mapper.configs.AesMode +import io.airbyte.config.mapper.configs.AesPadding +import io.airbyte.config.mapper.configs.EncryptionConfig +import io.airbyte.config.mapper.configs.EncryptionMapperConfig +import io.airbyte.config.mapper.configs.HashingConfig +import io.airbyte.config.mapper.configs.HashingMapperConfig +import io.airbyte.config.mapper.configs.HashingMethods +import io.airbyte.config.secrets.SecretsRepositoryReader +import io.airbyte.mappers.transformations.EncryptionMapper +import io.airbyte.mappers.transformations.HashingMapper +import io.airbyte.mappers.transformations.Mapper +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.BeforeEach 
+import org.junit.jupiter.api.Test +import java.util.UUID + +internal class MapperSecretHydrationHelperTest { + companion object { + private const val SECRET_VALUE = "my_secret_value" + private const val SECRET_COORDINATE = "airbyte_coordinate" + private val ORGANIZATION_ID = UUID.randomUUID() + } + + private val airbyteApiClient = mockk() + private val secretsRepositoryReader = mockk(relaxed = true) + + private val hashingMapper = HashingMapper() + private val encryptionMapper = EncryptionMapper() + + private val mapperSecretHydrationHelper = + MapperSecretHydrationHelper( + mappers = listOf(encryptionMapper as Mapper, hashingMapper as Mapper), + secretsRepositoryReader = secretsRepositoryReader, + airbyteApiClient = airbyteApiClient, + ) + + @BeforeEach + fun setup() { + every { + airbyteApiClient.secretPersistenceConfigApi.getSecretsPersistenceConfig(any()) + } returns SecretPersistenceConfig(SecretPersistenceType.TESTING, Jsons.emptyObject(), ScopeType.ORGANIZATION, ORGANIZATION_ID) + } + + @Test + fun `test hydrate mapper config`() { + val mapperConfig = + EncryptionMapperConfig( + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "target", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Reference(SECRET_COORDINATE), + ), + ) + + val mapperConfigJson = Jsons.jsonNode(mapperConfig.config()) + val configWithSecrets = + Jsons.jsonNode( + mapOf( + "algorithm" to "AES", + "targetField" to "target", + "mode" to "CBC", + "padding" to "NoPadding", + "key" to SECRET_VALUE, + ), + ) + + every { secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence(eq(mapperConfigJson), any()) } returns configWithSecrets + + val catalog = generateCatalogWithMapper(mapperConfig) + val hydratedConfig = mapperSecretHydrationHelper.hydrateMapperSecrets(catalog, true, ORGANIZATION_ID) + + val expectedConfig = + mapperConfig.copy( + config = + AesEncryptionConfig( + algorithm = EncryptionConfig.ALGO_AES, + targetField = "target", + mode = AesMode.CBC, + padding = AesPadding.NoPadding, + key = AirbyteSecret.Hydrated(SECRET_VALUE), + ), + ) + + Assertions.assertEquals(expectedConfig, hydratedConfig.streams.first().mappers.first()) + + verify { secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence(eq(mapperConfigJson), any()) } + } + + @Test + fun `test without secrets in spec does not try to hydrate secrets`() { + val mapperConfig = + HashingMapperConfig(config = HashingConfig(targetField = "target", method = HashingMethods.SHA256, fieldNameSuffix = "_hashed")) + val catalog = generateCatalogWithMapper(mapperConfig) + + val resultingCatalog = mapperSecretHydrationHelper.hydrateMapperSecrets(catalog, true, ORGANIZATION_ID) + Assertions.assertEquals(mapperConfig, resultingCatalog.streams.first().mappers.first()) + + verify(exactly = 0) { + airbyteApiClient.secretPersistenceConfigApi.getSecretsPersistenceConfig(any()) + secretsRepositoryReader.hydrateConfigFromRuntimeSecretPersistence(any(), any()) + secretsRepositoryReader.hydrateConfigFromDefaultSecretPersistence(any()) + } + } + + private fun generateCatalogWithMapper(mapperConfig: MapperConfig): ConfiguredAirbyteCatalog { + return ConfiguredAirbyteCatalog( + listOf( + ConfiguredAirbyteStream.Builder().stream( + mockk(), + ).syncMode(SyncMode.FULL_REFRESH).destinationSyncMode(DestinationSyncMode.OVERWRITE).mappers(listOf(mapperConfig)).build(), + ), + ) + } +} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/input/ReplicationInputMapperTest.kt 
b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/input/ReplicationInputMapperTest.kt index 6963c27c8cb..8828f8c1e60 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/input/ReplicationInputMapperTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/input/ReplicationInputMapperTest.kt @@ -87,6 +87,7 @@ class ReplicationInputMapperTest { null, ConnectionContext().withOrganizationId(UUID.randomUUID()), null, + emptyList(), ) } diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/pod/PodLabelerTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/pod/PodLabelerTest.kt index 82e56408137..80867399dc7 100644 --- a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/pod/PodLabelerTest.kt +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/pod/PodLabelerTest.kt @@ -20,7 +20,10 @@ import io.airbyte.workers.pod.PodLabeler.LabelKeys.ORCHESTRATOR_IMAGE_VERSION import io.airbyte.workers.pod.PodLabeler.LabelKeys.SOURCE_IMAGE_NAME import io.airbyte.workers.pod.PodLabeler.LabelKeys.SOURCE_IMAGE_VERSION import io.airbyte.workers.pod.PodLabeler.LabelKeys.WORKLOAD_ID +import io.mockk.every +import io.mockk.mockk import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.Arguments @@ -30,9 +33,17 @@ import java.util.stream.Stream import io.airbyte.workers.pod.PodLabeler.LabelKeys.ORCHESTRATOR_IMAGE_NAME as REPL_ORCHESTRATOR_IMAGE_NAME class PodLabelerTest { + private lateinit var mPodNetworkSecurityLabeler: PodNetworkSecurityLabeler + + @BeforeEach + fun setUp() { + mPodNetworkSecurityLabeler = mockk() + every { mPodNetworkSecurityLabeler.getLabels(any(), any()) } returns emptyMap() + } + @Test fun getSourceLabels() { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val result = labeler.getSourceLabels() assert( @@ -45,7 +56,7 @@ class PodLabelerTest { @Test fun getDestinationLabels() { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val result = labeler.getDestinationLabels() assert( @@ -58,7 +69,7 @@ class PodLabelerTest { @Test fun getReplicationOrchestratorLabels() { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val result = labeler.getReplicationOrchestratorLabels(ORCHESTRATOR_IMAGE_NAME) val shortImageName = PodUtils.getShortImageName(ORCHESTRATOR_IMAGE_NAME) val imageVersion = PodUtils.getImageVersion(ORCHESTRATOR_IMAGE_NAME) @@ -76,7 +87,7 @@ class PodLabelerTest { @Test fun getCheckLabels() { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val result = labeler.getCheckLabels() assert( @@ -89,7 +100,7 @@ class PodLabelerTest { @Test fun getDiscoverLabels() { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val result = labeler.getDiscoverLabels() assert( @@ -102,7 +113,7 @@ class PodLabelerTest { @Test fun getSpecLabels() { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val result = labeler.getSpecLabels() assert( @@ -116,7 +127,7 @@ class PodLabelerTest { @ParameterizedTest @MethodSource("randomStringMatrix") fun getWorkloadLabels(workloadId: String) { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val result = labeler.getWorkloadLabels(workloadId) assert( @@ -130,7 +141,7 @@ class PodLabelerTest { 
@ParameterizedTest @MethodSource("randomStringMatrix") fun getMutexLabels(key: String) { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val result = labeler.getMutexLabels(key) assert( @@ -143,7 +154,7 @@ class PodLabelerTest { @Test fun getAutoIdLabels() { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val id = UUID.randomUUID() val result = labeler.getAutoIdLabels(id) @@ -163,8 +174,8 @@ class PodLabelerTest { passThroughLabels: Map, autoId: UUID, ) { - val labeler = PodLabeler() - val result = labeler.getSharedLabels(workloadId, mutexKey, passThroughLabels, autoId) + val labeler = PodLabeler(mPodNetworkSecurityLabeler) + val result = labeler.getSharedLabels(workloadId, mutexKey, passThroughLabels, autoId, null, emptyList()) assert( result == @@ -176,9 +187,34 @@ class PodLabelerTest { ) } + @Test + fun getSharedLabelsWithNetworkSecurityLabels() { + val podNetworkSecurityLabeler: PodNetworkSecurityLabeler = mockk() + val labeler = PodLabeler(podNetworkSecurityLabeler) + val workloadId = UUID.randomUUID().toString() + val mutexKey = UUID.randomUUID().toString() + val passThroughLabels = mapOf("random labels1" to "from input msg1") + val autoId = UUID.randomUUID() + val workspaceId = UUID.randomUUID() + val networkSecurityTokens = listOf("token1") + + every { podNetworkSecurityLabeler.getLabels(workspaceId, networkSecurityTokens) } returns mapOf("networkSecurityTokenHash" to "hashedToken1") + val result = labeler.getSharedLabels(workloadId, mutexKey, passThroughLabels, autoId, workspaceId, networkSecurityTokens) + + assert( + result == + passThroughLabels + + labeler.getWorkloadLabels(workloadId) + + labeler.getMutexLabels(mutexKey) + + labeler.getAutoIdLabels(autoId) + + labeler.getPodSweeperLabels() + + mapOf("networkSecurityTokenHash" to "hashedToken1"), + ) + } + @Test internal fun testGetReplicationLabels() { - val labeler = PodLabeler() + val labeler = PodLabeler(mPodNetworkSecurityLabeler) val version = "dev" val orchestrationImageName = "orchestrator-image-name:$version" val sourceImageName = "source-image-name:$version" diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/pod/PodNetworkSecurityLabelerTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/pod/PodNetworkSecurityLabelerTest.kt new file mode 100644 index 00000000000..cd06bc3094a --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/pod/PodNetworkSecurityLabelerTest.kt @@ -0,0 +1,82 @@ +package io.airbyte.workers.pod + +import io.airbyte.workers.hashing.TestHasher +import io.fabric8.kubernetes.api.model.LabelSelector +import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicy +import io.fabric8.kubernetes.api.model.networking.v1.NetworkPolicySpec +import io.micronaut.cache.CacheManager +import io.micronaut.cache.SyncCache +import io.mockk.every +import io.mockk.just +import io.mockk.mockk +import io.mockk.runs +import io.mockk.verify +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.util.Optional +import java.util.UUID + +class PodNetworkSecurityLabelerTest { + private lateinit var mNetworkPolicyFetcher: NetworkPolicyFetcher + private lateinit var mCacheManager: CacheManager + private lateinit var mCache: SyncCache + private val tokenHashKey = "airbyte/networkSecurityTokenHash" + + @BeforeEach + fun setup() { + mNetworkPolicyFetcher = mockk() + mCacheManager = mockk() + mCache = mockk() + + 
every { mCacheManager.getCache(any()) } returns mCache + } + + @Test + fun testGetLabels() { + val labeler = PodNetworkSecurityLabeler(mNetworkPolicyFetcher, mCacheManager, TestHasher()) + val networkSecurityTokens = listOf("token1", "token2") + val workspaceId = UUID.randomUUID() + // No cache + every { mCache.get(workspaceId, Map::class.java) } returns Optional.empty() + every { mCache.put(any(), any()) } just runs + every { mNetworkPolicyFetcher.matchingNetworkPolicies(workspaceId, networkSecurityTokens, any()) } returns + listOf( + NetworkPolicy().apply { + spec = + NetworkPolicySpec().apply { + podSelector = + LabelSelector().apply { + matchLabels = mapOf("Label1" to "value1") + } + } + }, + NetworkPolicy().apply { + spec = + NetworkPolicySpec().apply { + podSelector = + LabelSelector().apply { + matchLabels = mapOf("Label2" to "value2") + } + } + }, + ) + val result = labeler.getLabels(workspaceId, networkSecurityTokens) + assertEquals(mapOf("Label1" to "value1", "Label2" to "value2"), result) + } + + @Test + fun testShortCircuit() { + val labeler = PodNetworkSecurityLabeler(mNetworkPolicyFetcher, mCacheManager, TestHasher()) + val networkSecurityTokens = emptyList() + val workspaceId = UUID.randomUUID() + // No cache + every { mCache.get(workspaceId, Map::class.java) } returns Optional.empty() + every { mCache.put(any(), any()) } just runs + val result = labeler.getLabels(workspaceId, networkSecurityTokens) + assertEquals(emptyMap(), result) + verify(exactly = 0) { mNetworkPolicyFetcher.matchingNetworkPolicies(any(), any(), any()) } + verify(exactly = 0) { mCache.get(any(), Map::class.java) } + verify(exactly = 0) { mCache.put(any(), any()) } + } +} diff --git a/airbyte-commons/build.gradle.kts b/airbyte-commons/build.gradle.kts index 089a01b28c8..414c86a9b83 100644 --- a/airbyte-commons/build.gradle.kts +++ b/airbyte-commons/build.gradle.kts @@ -7,8 +7,7 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut + api(libs.bundles.micronaut.annotation) implementation(libs.bundles.jackson) implementation(libs.guava) @@ -35,12 +34,13 @@ airbyte { } } -val downloadSpecSecretMask = tasks.register("downloadSpecSecretMask") { - src("https://connectors.airbyte.com/files/registries/v0/specs_secrets_mask.yaml") - dest(File(projectDir, "src/main/resources/seed/specs_secrets_mask.yaml")) - overwrite(true) - onlyIfModified(true) -} +val downloadSpecSecretMask = + tasks.register("downloadSpecSecretMask") { + src("https://connectors.airbyte.com/files/registries/v0/specs_secrets_mask.yaml") + dest(File(projectDir, "src/main/resources/seed/specs_secrets_mask.yaml")) + overwrite(true) + onlyIfModified(true) + } tasks.named("processResources") { dependsOn(downloadSpecSecretMask) @@ -51,6 +51,6 @@ tasks.named("test") { mapOf( "Z_TESTING_PURPOSES_ONLY_1" to "value-defined", "Z_TESTING_PURPOSES_ONLY_2" to " ", - ) + ), ) } diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java b/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java index 84ce20f23ae..f9d5f159147 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/io/LineGobbler.java @@ -104,7 +104,7 @@ public static void gobble(final InputStream is, * * @param message message to be consumed */ - public static void gobble(final String message) { + private static void gobble(final String message) { gobble(message, LOGGER::info); } @@ -113,9 +113,15 @@ 
public static void gobble(final String message) { * temporal activity. * * @param message message to emphasize + * @deprecated use info logging with correct mdc context instead */ + @Deprecated public static void startSection(final String message) { - gobble("\r\n----- START " + message + " -----\r\n\r\n"); + gobble(formatStartSection(message)); + } + + public static String formatStartSection(final String message) { + return "\r\n----- START " + message + " -----\r\n\r\n"; } /** @@ -123,9 +129,14 @@ public static void startSection(final String message) { * temporal. activity * * @param message message to emphasize + * @deprecated use info logging with correct mdc context instead */ public static void endSection(final String message) { - gobble("\r\n----- END " + message + " -----\r\n\r\n"); + gobble(formatEndSection(message)); + } + + public static String formatEndSection(final String message) { + return "\r\n----- END " + message + " -----\r\n\r\n"; } private final BufferedReader is; diff --git a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java index 63738af2578..bf1e4cb511e 100644 --- a/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java +++ b/airbyte-commons/src/main/java/io/airbyte/commons/json/JsonPaths.java @@ -83,6 +83,10 @@ public Set

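(For callers of the now-deprecated LineGobbler.startSection / endSection in the hunk above: a minimal migration sketch, illustrative only and not part of this patch. The logger, the replicate() function, and the "REPLICATION" section name are assumptions; the idea is to emit the banner through the caller's own logger so it carries the correct MDC context, which is what the @deprecated note asks for.)

import io.airbyte.commons.io.LineGobbler
import io.github.oshai.kotlinlogging.KotlinLogging

private val logger = KotlinLogging.logger {}

fun replicate() {
  // before: LineGobbler.startSection("REPLICATION")
  logger.info { LineGobbler.formatStartSection("REPLICATION") }
  // ... replication work ...
  // before: LineGobbler.endSection("REPLICATION")
  logger.info { LineGobbler.formatEndSection("REPLICATION") }
}

Routing the banner through the caller's logger, rather than LineGobbler's own, is the design intent behind exposing formatStartSection/formatEndSection as plain String helpers.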
* Handle persisting Permission to the Config Database and perform all SQL queries. */ -@Slf4j public class OrganizationPersistence { private final ExceptionWrappingDatabase database; @@ -269,8 +267,6 @@ private void updateOrganizationInDB(final DSLContext ctx, final Organization org .set(ORGANIZATION.NAME, organization.getName()) .set(ORGANIZATION.EMAIL, organization.getEmail()) .set(ORGANIZATION.USER_ID, organization.getUserId()) - .set(ORGANIZATION.PBA, organization.getPba()) - .set(ORGANIZATION.ORG_LEVEL_BILLING, organization.getOrgLevelBilling()) .set(ORGANIZATION.UPDATED_AT, timestamp) .where(ORGANIZATION.ID.eq(organization.getOrganizationId())) .execute(); @@ -291,8 +287,6 @@ private void insertOrganizationIntoDB(final DSLContext ctx, final Organization o .set(ORGANIZATION.USER_ID, organization.getUserId()) .set(ORGANIZATION.NAME, organization.getName()) .set(ORGANIZATION.EMAIL, organization.getEmail()) - .set(ORGANIZATION.PBA, organization.getPba()) - .set(ORGANIZATION.ORG_LEVEL_BILLING, organization.getOrgLevelBilling()) .set(ORGANIZATION.CREATED_AT, timestamp) .set(ORGANIZATION.UPDATED_AT, timestamp) .execute(); @@ -324,9 +318,7 @@ private static Organization createOrganizationFromRecord(final Record record) { .withName(record.get(ORGANIZATION.NAME)) .withEmail(record.get(ORGANIZATION.EMAIL)) .withUserId(record.get(ORGANIZATION.USER_ID)) - .withSsoRealm(record.get(SSO_CONFIG.KEYCLOAK_REALM)) - .withPba(record.get(ORGANIZATION.PBA)) - .withOrgLevelBilling(record.get(ORGANIZATION.ORG_LEVEL_BILLING)); + .withSsoRealm(record.get(SSO_CONFIG.KEYCLOAK_REALM)); } private static SsoConfig createSsoConfigFromRecord(final Record record) { diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StateUpdateBatch.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StateUpdateBatch.java index b657e583e89..a3371766423 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StateUpdateBatch.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StateUpdateBatch.java @@ -6,11 +6,9 @@ import java.util.ArrayList; import java.util.List; -import lombok.Getter; import org.jooq.DSLContext; import org.jooq.Query; -@Getter class StateUpdateBatch { private final List updatedStreamStates = new ArrayList<>(); @@ -23,4 +21,16 @@ void save(final DSLContext ctx) { ctx.batch(deletedStreamStates).execute(); } + public List getDeletedStreamStates() { + return deletedStreamStates; + } + + public List getCreatedStreamStates() { + return createdStreamStates; + } + + public List getUpdatedStreamStates() { + return updatedStreamStates; + } + } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java index f7da4a2640d..0e42d4bdd7a 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/StreamResetPersistence.java @@ -46,7 +46,7 @@ public List getStreamResets(final UUID connectionId) throws IO .where(STREAM_RESET.CONNECTION_ID.eq(connectionId)) .fetch(getStreamResetRecordMapper()) .stream() - .flatMap(row -> Stream.of(new StreamDescriptor().withName(row.streamName()).withNamespace(row.streamNamespace()))) + .flatMap(row -> Stream.of(new 
StreamDescriptor().withName(row.getStreamName()).withNamespace(row.getStreamNamespace()))) .toList(); } diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/UserPersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/UserPersistence.java index 3669d3c1bbd..ecf3034fe13 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/UserPersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/UserPersistence.java @@ -32,17 +32,19 @@ import io.airbyte.db.instance.configs.jooq.generated.enums.AuthProvider; import io.airbyte.db.instance.configs.jooq.generated.enums.Status; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.time.OffsetDateTime; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; import org.jooq.DSLContext; import org.jooq.JSONB; import org.jooq.Record; import org.jooq.Result; import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * User Persistence. @@ -50,10 +52,11 @@ * Perform all SQL queries and handle persisting User to the Config Database. * */ -@Slf4j @SuppressWarnings("PMD.LiteralsFirstInComparisons") public class UserPersistence { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static final String PRIMARY_KEY = "id"; /** diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/WorkspacePersistence.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/WorkspacePersistence.java index 7053bbef7f5..723f29b8134 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/WorkspacePersistence.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/WorkspacePersistence.java @@ -18,12 +18,10 @@ import java.util.List; import java.util.Optional; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; /** * Persistence Interface for Workspace table. 
*/ -@Slf4j public class WorkspacePersistence { public static final String DEFAULT_WORKSPACE_NAME = "Default Workspace"; diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java index b5569e3fa5d..f6841c9a7be 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/OrganizationPersistenceTest.java @@ -75,9 +75,7 @@ void createOrganization() throws Exception { .withOrganizationId(UUID.randomUUID()) .withUserId(UUID.randomUUID()) .withEmail("octavia@airbyte.io") - .withName("new org") - .withPba(false) - .withOrgLevelBilling(false); + .withName("new org"); organizationPersistence.createOrganization(organization); final Optional result = organizationPersistence.getOrganization(organization.getOrganizationId()); assertTrue(result.isPresent()); @@ -90,9 +88,7 @@ void createSsoConfig() throws Exception { .withOrganizationId(UUID.randomUUID()) .withUserId(UUID.randomUUID()) .withEmail("test@test.com") - .withName("new org") - .withPba(false) - .withOrgLevelBilling(false); + .withName("new org"); final SsoConfig ssoConfig = new SsoConfig() .withSsoConfigId(UUID.randomUUID()) .withOrganizationId(org.getOrganizationId()) @@ -152,8 +148,6 @@ void updateOrganization() throws Exception { updatedOrganization.setName("new name"); updatedOrganization.setEmail("newemail@airbyte.io"); - updatedOrganization.setPba(!updatedOrganization.getPba()); - updatedOrganization.setOrgLevelBilling(!updatedOrganization.getOrgLevelBilling()); updatedOrganization.setUserId(MockData.CREATOR_USER_ID_5); organizationPersistence.updateOrganization(updatedOrganization); @@ -163,8 +157,6 @@ void updateOrganization() throws Exception { assertEquals(updatedOrganization.getOrganizationId(), result.getOrganizationId()); assertEquals(updatedOrganization.getName(), result.getName()); assertEquals(updatedOrganization.getEmail(), result.getEmail()); - assertEquals(updatedOrganization.getPba(), result.getPba()); - assertEquals(updatedOrganization.getOrgLevelBilling(), result.getOrgLevelBilling()); assertEquals(updatedOrganization.getUserId(), result.getUserId()); } @@ -190,9 +182,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withOrganizationId(orgId1) .withUserId(userId) .withName("keyword") - .withEmail("email1") - .withPba(false) - .withOrgLevelBilling(false)); + .withEmail("email1")); // grant user an admin access to org 1 BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) @@ -205,9 +195,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withOrganizationId(orgId2) .withUserId(userId) .withName("Keyword") - .withEmail("email2") - .withPba(false) - .withOrgLevelBilling(false)); + .withEmail("email2")); // grant user an editor access to org 2 BaseConfigDatabaseTest.writePermission(new Permission() .withPermissionId(UUID.randomUUID()) @@ -220,9 +208,7 @@ void testListOrganizationsByUserId(final Boolean withKeywordSearch, final Boolea .withOrganizationId(orgId3) .withUserId(UUID.randomUUID()) .withName("randomName") - .withEmail("email3") - .withPba(false) - .withOrgLevelBilling(false)); + .withEmail("email3")); // grant user a read access to org 3 BaseConfigDatabaseTest.writePermission(new 
Permission() .withPermissionId(UUID.randomUUID()) diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java index 227d995e0f4..b01b4846967 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java @@ -179,17 +179,13 @@ class UserAccessTests { .withUserId(UUID.randomUUID()) .withOrganizationId(UUID.randomUUID()) .withName("Org") - .withEmail("test@org.com") - .withPba(false) - .withOrgLevelBilling(false); + .withEmail("test@org.com"); private static final Organization ORG_2 = new Organization() .withUserId(UUID.randomUUID()) .withOrganizationId(UUID.randomUUID()) .withName("Org 2") - .withEmail("test@org.com") - .withPba(false) - .withOrgLevelBilling(false); + .withEmail("test@org.com"); private static final StandardWorkspace WORKSPACE_1_ORG_1 = new StandardWorkspace() .withWorkspaceId(UUID.randomUUID()) diff --git a/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java b/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java index 4115c4c21b6..dd7de87a4e0 100644 --- a/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java +++ b/airbyte-config/config-persistence/src/testFixtures/java/io/airbyte/config/persistence/MockData.java @@ -69,10 +69,10 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.TreeMap; import java.util.UUID; import java.util.stream.Collectors; -import lombok.Data; @SuppressWarnings("LineLength") public class MockData { @@ -310,14 +310,11 @@ public static List permissions() { public static List organizations() { final Organization organization1 = - new Organization().withOrganizationId(ORGANIZATION_ID_1).withName("organization-1").withEmail("email@email.com").withPba(false) - .withOrgLevelBilling(false); + new Organization().withOrganizationId(ORGANIZATION_ID_1).withName("organization-1").withEmail("email@email.com"); final Organization organization2 = - new Organization().withOrganizationId(ORGANIZATION_ID_2).withName("organization-2").withEmail("email2@email.com").withPba(false) - .withOrgLevelBilling(false); + new Organization().withOrganizationId(ORGANIZATION_ID_2).withName("organization-2").withEmail("email2@email.com"); final Organization organization3 = - new Organization().withOrganizationId(ORGANIZATION_ID_3).withName("organization-3").withEmail("emai3l@email.com").withPba(false) - .withOrgLevelBilling(false); + new Organization().withOrganizationId(ORGANIZATION_ID_3).withName("organization-3").withEmail("emai3l@email.com"); return Arrays.asList(organization1, organization2, organization3); } @@ -889,18 +886,50 @@ public static Organization defaultOrganization() { return new Organization() .withOrganizationId(DEFAULT_ORGANIZATION_ID) .withName("default org") - .withEmail("test@test.com") - .withPba(false) - .withOrgLevelBilling(false); + .withEmail("test@test.com"); } - @Data public static class ActorCatalogFetchEventWithCreationDate { private final ActorCatalogFetchEvent actorCatalogFetchEvent; private final OffsetDateTime createdAt; + public ActorCatalogFetchEventWithCreationDate(ActorCatalogFetchEvent actorCatalogFetchEvent, 
OffsetDateTime createdAt) { + this.actorCatalogFetchEvent = actorCatalogFetchEvent; + this.createdAt = createdAt; + } + + public ActorCatalogFetchEvent getActorCatalogFetchEvent() { + return actorCatalogFetchEvent; + } + + public OffsetDateTime getCreatedAt() { + return createdAt; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + ActorCatalogFetchEventWithCreationDate that = (ActorCatalogFetchEventWithCreationDate) o; + return Objects.equals(actorCatalogFetchEvent, that.actorCatalogFetchEvent) && Objects.equals(createdAt, that.createdAt); + } + + @Override + public int hashCode() { + return Objects.hash(actorCatalogFetchEvent, createdAt); + } + + @Override + public String toString() { + return "ActorCatalogFetchEventWithCreationDate{" + + "actorCatalogFetchEvent=" + actorCatalogFetchEvent + + ", createdAt=" + createdAt + + '}'; + } + } public static List actorCatalogFetchEventsForAggregationTest() { diff --git a/airbyte-config/init/build.gradle.kts b/airbyte-config/init/build.gradle.kts index 00e4b81bf4c..2cc2b644fc0 100644 --- a/airbyte-config/init/build.gradle.kts +++ b/airbyte-config/init/build.gradle.kts @@ -5,9 +5,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - ksp(platform(libs.micronaut.platform)) ksp(libs.bundles.micronaut.annotation.processor) @@ -49,10 +46,11 @@ airbyte { } } -val copyScripts = tasks.register("copyScripts") { - from("scripts") - into("build/airbyte/docker/bin/scripts") -} +val copyScripts = + tasks.register("copyScripts") { + from("scripts") + into("build/airbyte/docker/bin/scripts") + } tasks.named("dockerCopyDistribution") { dependsOn(copyScripts) diff --git a/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/ApplyDefinitionsHelper.kt b/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/ApplyDefinitionsHelper.kt index 9436c355921..60a5bf47cec 100644 --- a/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/ApplyDefinitionsHelper.kt +++ b/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/ApplyDefinitionsHelper.kt @@ -9,6 +9,7 @@ import io.airbyte.commons.version.AirbyteProtocolVersionRange import io.airbyte.config.ActorDefinitionBreakingChange import io.airbyte.config.ActorDefinitionVersion import io.airbyte.config.ActorType +import io.airbyte.config.Configs.SeedDefinitionsProviderType import io.airbyte.config.ConnectorEnumRolloutState import io.airbyte.config.ConnectorRegistryDestinationDefinition import io.airbyte.config.ConnectorRegistrySourceDefinition @@ -49,6 +50,7 @@ import kotlin.jvm.optionals.getOrNull @Requires(bean = MetricClient::class) class ApplyDefinitionsHelper( @param:Named("seedDefinitionsProvider") private val definitionsProvider: DefinitionsProvider, + private val seedProviderType: SeedDefinitionsProviderType, private val jobPersistence: JobPersistence, private val actorDefinitionService: ActorDefinitionService, private val sourceService: SourceService, @@ -201,7 +203,7 @@ class ApplyDefinitionsHelper( } @VisibleForTesting - fun applyReleaseCandidates(rcDefinitions: List) { + internal fun applyReleaseCandidates(rcDefinitions: List) { for (rcDef in rcDefinitions) { val rcAdv = when (rcDef) { @@ -349,13 +351,24 @@ class ApplyDefinitionsHelper( return reImportVersionInUse && definitionIsInUse } - private fun getShouldUpdateActorDefinitionDefaultVersion( + @VisibleForTesting + internal fun getShouldUpdateActorDefinitionDefaultVersion( currentDefaultADV: 
ActorDefinitionVersion, newADV: ActorDefinitionVersion, actorDefinitionIdsInUse: Set, updateAll: Boolean, ): Boolean { - val newVersionIsAvailable = newADV.dockerImageTag != currentDefaultADV.dockerImageTag + val newVersionIsAvailable = + when (seedProviderType) { + SeedDefinitionsProviderType.REMOTE -> newADV.dockerImageTag != currentDefaultADV.dockerImageTag + SeedDefinitionsProviderType.LOCAL -> { + // (oss) if we're using the registry shipped with the platform, connector versions may be stale. + // We should only update if the new version is greater than the current version, in case the user has manually + // upgraded the connector via the UI. See https://github.com/airbytehq/airbyte-internal-issues/issues/8691. + newADV.dockerImageTag > currentDefaultADV.dockerImageTag + } + } + val definitionIsInUse = actorDefinitionIdsInUse.contains(currentDefaultADV.actorDefinitionId) val shouldApplyNewVersion = updateAll || !definitionIsInUse diff --git a/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/DeclarativeSourceUpdater.kt b/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/DeclarativeSourceUpdater.kt index e4446d2c180..fc19b097803 100644 --- a/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/DeclarativeSourceUpdater.kt +++ b/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/DeclarativeSourceUpdater.kt @@ -5,6 +5,10 @@ package io.airbyte.config.init import io.airbyte.data.services.ActorDefinitionService import io.airbyte.data.services.DeclarativeManifestImageVersionService +import io.airbyte.featureflag.ANONYMOUS +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.RunDeclarativeSourcesUpdater +import io.airbyte.featureflag.Workspace import org.slf4j.LoggerFactory /** @@ -18,12 +22,18 @@ class DeclarativeSourceUpdater( private val declarativeManifestImageVersionService: DeclarativeManifestImageVersionService, private val actorDefinitionService: ActorDefinitionService, private val airbyteCompatibleConnectorsValidator: AirbyteCompatibleConnectorsValidator, + private val featureFlagClient: FeatureFlagClient, ) { companion object { private val log = LoggerFactory.getLogger(DeclarativeSourceUpdater::class.java) } fun apply() { + if (!featureFlagClient.boolVariation(RunDeclarativeSourcesUpdater, Workspace(ANONYMOUS))) { + log.info("Declarative sources update feature flag is disabled. 
Skipping updating declarative sources.") + return + } + val currentDeclarativeManifestImageVersions = declarativeManifestImageVersionService.listDeclarativeManifestImageVersions() val latestDeclarativeManifestImageVersions = declarativeManifestImageVersionsProvider.getLatestDeclarativeManifestImageVersions() diff --git a/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/config/SeedBeanFactory.kt b/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/config/SeedBeanFactory.kt index a44b2fa5b7d..dc074209c65 100644 --- a/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/config/SeedBeanFactory.kt +++ b/airbyte-config/init/src/main/kotlin/io/airbyte/config/init/config/SeedBeanFactory.kt @@ -3,6 +3,7 @@ */ package io.airbyte.config.init.config +import io.airbyte.config.Configs.SeedDefinitionsProviderType import io.airbyte.config.init.AirbyteCompatibleConnectorsValidator import io.airbyte.config.init.DeclarativeManifestImageVersionsProvider import io.airbyte.config.init.DeclarativeSourceUpdater @@ -11,6 +12,7 @@ import io.airbyte.config.specs.LocalDefinitionsProvider import io.airbyte.config.specs.RemoteDefinitionsProvider import io.airbyte.data.services.ActorDefinitionService import io.airbyte.data.services.DeclarativeManifestImageVersionService +import io.airbyte.featureflag.FeatureFlagClient import io.micronaut.context.annotation.Factory import io.micronaut.context.annotation.Value import io.micronaut.core.util.StringUtils @@ -32,15 +34,29 @@ class SeedBeanFactory { @Singleton @Named("seedDefinitionsProvider") fun seedDefinitionsProvider( - @Value("\${airbyte.connector-registry.seed-provider}") seedProvider: String, + seedProvider: SeedDefinitionsProviderType, remoteDefinitionsProvider: RemoteDefinitionsProvider, ): DefinitionsProvider { + return when (seedProvider) { + SeedDefinitionsProviderType.LOCAL -> { + LOGGER.info("Using local definitions provider for seeding") + LocalDefinitionsProvider() + } + SeedDefinitionsProviderType.REMOTE -> { + LOGGER.info("Using remote definitions provider for seeding") + remoteDefinitionsProvider + } + } + } + + @Singleton + fun seedDefinitionsProviderType( + @Value("\${airbyte.connector-registry.seed-provider}") seedProvider: String, + ): SeedDefinitionsProviderType { if (StringUtils.isEmpty(seedProvider) || LOCAL_SEED_PROVIDER.equals(seedProvider, ignoreCase = true)) { - LOGGER.info("Using local definitions provider for seeding") - return LocalDefinitionsProvider() + return SeedDefinitionsProviderType.LOCAL } else if (REMOTE_SEED_PROVIDER.equals(seedProvider, ignoreCase = true)) { - LOGGER.info("Using remote definitions provider for seeding") - return remoteDefinitionsProvider + return SeedDefinitionsProviderType.REMOTE } throw IllegalArgumentException("Invalid seed provider: $seedProvider") @@ -53,12 +69,14 @@ class SeedBeanFactory { declarativeManifestImageVersionService: DeclarativeManifestImageVersionService, actorDefinitionService: ActorDefinitionService, airbyteCompatibleConnectorsValidator: AirbyteCompatibleConnectorsValidator, + featureFlagClient: FeatureFlagClient, ): DeclarativeSourceUpdater { return DeclarativeSourceUpdater( declarativeManifestImageVersionsProvider, declarativeManifestImageVersionService, actorDefinitionService, airbyteCompatibleConnectorsValidator, + featureFlagClient, ) } @@ -69,12 +87,14 @@ class SeedBeanFactory { declarativeManifestImageVersionService: DeclarativeManifestImageVersionService, actorDefinitionService: ActorDefinitionService, airbyteCompatibleConnectorsValidator: 
AirbyteCompatibleConnectorsValidator, + featureFlagClient: FeatureFlagClient, ): DeclarativeSourceUpdater { return DeclarativeSourceUpdater( declarativeManifestImageVersionsProvider, declarativeManifestImageVersionService, actorDefinitionService, airbyteCompatibleConnectorsValidator, + featureFlagClient, ) } diff --git a/airbyte-config/init/src/test/kotlin/io/airbyte/config/init/ApplyDefinitionsHelperTest.kt b/airbyte-config/init/src/test/kotlin/io/airbyte/config/init/ApplyDefinitionsHelperTest.kt index c0f0be7d222..d8d500fd30d 100644 --- a/airbyte-config/init/src/test/kotlin/io/airbyte/config/init/ApplyDefinitionsHelperTest.kt +++ b/airbyte-config/init/src/test/kotlin/io/airbyte/config/init/ApplyDefinitionsHelperTest.kt @@ -8,6 +8,7 @@ import io.airbyte.commons.version.AirbyteProtocolVersionRange import io.airbyte.commons.version.Version import io.airbyte.config.ActorDefinitionVersion import io.airbyte.config.BreakingChanges +import io.airbyte.config.Configs.SeedDefinitionsProviderType import io.airbyte.config.ConnectorEnumRolloutState import io.airbyte.config.ConnectorRegistryDestinationDefinition import io.airbyte.config.ConnectorRegistrySourceDefinition @@ -40,6 +41,8 @@ import io.mockk.justRun import io.mockk.mockk import io.mockk.verify import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertTrue import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertThrows @@ -65,6 +68,7 @@ internal class ApplyDefinitionsHelperTest { private val actorDefinitionVersionResolver: ActorDefinitionVersionResolver = mockk() private val airbyteCompatibleConnectorsValidator: AirbyteCompatibleConnectorsValidator = mockk() private val connectorRolloutService: ConnectorRolloutService = mockk() + private val seedDefinitionsProviderType: SeedDefinitionsProviderType = mockk() private lateinit var applyDefinitionsHelper: ApplyDefinitionsHelper @BeforeEach @@ -72,6 +76,7 @@ internal class ApplyDefinitionsHelperTest { applyDefinitionsHelper = ApplyDefinitionsHelper( definitionsProvider, + seedDefinitionsProviderType, jobPersistence, actorDefinitionService, sourceService, @@ -90,6 +95,7 @@ internal class ApplyDefinitionsHelperTest { every { airbyteCompatibleConnectorsValidator.validate(any(), any()) } returns ConnectorPlatformCompatibilityValidationResult(true, null) every { jobPersistence.currentProtocolVersionRange } returns Optional.of(AirbyteProtocolVersionRange(Version("2.0.0"), Version("3.0.0"))) every { actorDefinitionVersionResolver.fetchRemoteActorDefinitionVersion(any(), any(), any()) } returns Optional.empty() + every { seedDefinitionsProviderType.ordinal } returns SeedDefinitionsProviderType.REMOTE.ordinal mockVoidReturningFunctions() } @@ -856,6 +862,89 @@ internal class ApplyDefinitionsHelperTest { confirmVerified(actorDefinitionService, sourceService, destinationService, supportStateUpdater, metricClient) } + @ParameterizedTest + @MethodSource("updateScenarioWithSeedType") + fun `should only perform version rollbacks when using remote definitions provider`( + updateAll: Boolean, + isInUse: Boolean, + seedType: SeedDefinitionsProviderType, + ) { + every { seedDefinitionsProviderType.ordinal } returns seedType.ordinal + + val currentVersion = ConnectorRegistryConverters.toActorDefinitionVersion(SOURCE_POSTGRES_2) + val newVersion = ConnectorRegistryConverters.toActorDefinitionVersion(SOURCE_POSTGRES) + + val definitionsInUse = if (isInUse) 
setOf(currentVersion.actorDefinitionId) else setOf() + + val shouldUpdateVersion = + applyDefinitionsHelper.getShouldUpdateActorDefinitionDefaultVersion( + currentVersion, + newVersion, + definitionsInUse, + updateAll, + ) + + if (seedType == SeedDefinitionsProviderType.REMOTE && (!isInUse || updateAll)) { + assertTrue(shouldUpdateVersion) + } else { + assertFalse(shouldUpdateVersion) + } + } + + @ParameterizedTest + @MethodSource("updateScenarioWithSeedType") + fun `should perform version upgrades regardless of definitions provider`( + updateAll: Boolean, + isInUse: Boolean, + seedType: SeedDefinitionsProviderType, + ) { + every { seedDefinitionsProviderType.ordinal } returns seedType.ordinal + + val currentVersion = ConnectorRegistryConverters.toActorDefinitionVersion(SOURCE_POSTGRES) + val newVersion = ConnectorRegistryConverters.toActorDefinitionVersion(SOURCE_POSTGRES_2) + + val definitionsInUse = if (isInUse) setOf(currentVersion.actorDefinitionId) else setOf() + + val shouldUpdateVersion = + applyDefinitionsHelper.getShouldUpdateActorDefinitionDefaultVersion( + currentVersion, + newVersion, + definitionsInUse, + updateAll, + ) + + if (!isInUse || updateAll) { + assertTrue(shouldUpdateVersion) + } else { + assertFalse(shouldUpdateVersion) + } + } + + @ParameterizedTest + @MethodSource("updateScenarioWithSeedType") + fun `should not try to update the connector version if it is already matching`( + updateAll: Boolean, + isInUse: Boolean, + seedType: SeedDefinitionsProviderType, + ) { + every { seedDefinitionsProviderType.ordinal } returns seedType.ordinal + + val currentVersion = ConnectorRegistryConverters.toActorDefinitionVersion(SOURCE_POSTGRES) + val newVersion = ConnectorRegistryConverters.toActorDefinitionVersion(SOURCE_POSTGRES) + + val definitionsInUse = if (isInUse) setOf(currentVersion.actorDefinitionId) else setOf() + + val shouldUpdateVersion = + applyDefinitionsHelper.getShouldUpdateActorDefinitionDefaultVersion( + currentVersion, + newVersion, + definitionsInUse, + updateAll, + ) + + assertFalse(shouldUpdateVersion) + } + companion object { private const val INITIAL_CONNECTOR_VERSION = "0.1.0" private const val UPDATED_CONNECTOR_VERSION = "0.2.0" @@ -998,6 +1087,19 @@ internal class ApplyDefinitionsHelperTest { Arguments.of(false, true), ) + @JvmStatic + fun updateScenarioWithSeedType(): Stream = + Stream.of( + Arguments.of(true, true, SeedDefinitionsProviderType.REMOTE), + Arguments.of(true, false, SeedDefinitionsProviderType.REMOTE), + Arguments.of(false, false, SeedDefinitionsProviderType.REMOTE), + Arguments.of(false, true, SeedDefinitionsProviderType.REMOTE), + Arguments.of(true, true, SeedDefinitionsProviderType.LOCAL), + Arguments.of(true, false, SeedDefinitionsProviderType.LOCAL), + Arguments.of(false, false, SeedDefinitionsProviderType.LOCAL), + Arguments.of(false, true, SeedDefinitionsProviderType.LOCAL), + ) + @JvmStatic fun validInsertStates() = listOf(ConnectorEnumRolloutState.CANCELED) diff --git a/airbyte-config/init/src/test/kotlin/io/airbyte/config/init/DeclarativeSourceUpdaterTest.kt b/airbyte-config/init/src/test/kotlin/io/airbyte/config/init/DeclarativeSourceUpdaterTest.kt index cc32e496cfd..cfaae09dc38 100644 --- a/airbyte-config/init/src/test/kotlin/io/airbyte/config/init/DeclarativeSourceUpdaterTest.kt +++ b/airbyte-config/init/src/test/kotlin/io/airbyte/config/init/DeclarativeSourceUpdaterTest.kt @@ -5,6 +5,10 @@ package io.airbyte.config.init import io.airbyte.data.repositories.entities.DeclarativeManifestImageVersion import 
io.airbyte.data.services.ActorDefinitionService import io.airbyte.data.services.DeclarativeManifestImageVersionService +import io.airbyte.featureflag.ANONYMOUS +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.RunDeclarativeSourcesUpdater +import io.airbyte.featureflag.Workspace import io.mockk.confirmVerified import io.mockk.every import io.mockk.justRun @@ -19,6 +23,7 @@ internal class DeclarativeSourceUpdaterTest { private var mDeclarativeManifestImageVersionService: DeclarativeManifestImageVersionService = mockk() private var mActorDefinitionService: ActorDefinitionService = mockk() private var airbyteCompatibleConnectorsValidator: AirbyteCompatibleConnectorsValidator = mockk() + private var featureFlagClient: FeatureFlagClient = mockk() private lateinit var declarativeSourceUpdater: DeclarativeSourceUpdater @BeforeEach @@ -29,6 +34,7 @@ internal class DeclarativeSourceUpdaterTest { mDeclarativeManifestImageVersionService, mActorDefinitionService, airbyteCompatibleConnectorsValidator, + featureFlagClient, ) justRun { mDeclarativeManifestImageVersionService.writeDeclarativeManifestImageVersion(any()) } @@ -38,6 +44,7 @@ internal class DeclarativeSourceUpdaterTest { } returns DeclarativeManifestImageVersion(0, "0.1.0", testSha) every { mActorDefinitionService.updateDeclarativeActorDefinitionVersions(any(), any()) } returns 1 every { airbyteCompatibleConnectorsValidator.validateDeclarativeManifest(any()) } returns ConnectorPlatformCompatibilityValidationResult(true, "") + every { featureFlagClient.boolVariation(RunDeclarativeSourcesUpdater, Workspace(ANONYMOUS)) } returns true } @Test @@ -58,7 +65,7 @@ internal class DeclarativeSourceUpdaterTest { } @Test - fun `new cdk versions are added to database and actor definitions are updated`() { + fun `new sdm versions are added to database and actor definitions are updated`() { val oldVersion0 = DeclarativeManifestImageVersion(0, "0.1.0", testSha) val oldVersion1 = DeclarativeManifestImageVersion(1, "1.0.0", testSha) val newVersion1 = DeclarativeManifestImageVersion(1, "1.0.1", testSha) @@ -74,6 +81,22 @@ internal class DeclarativeSourceUpdaterTest { confirmVerified(mDeclarativeManifestImageVersionService, mActorDefinitionService, mDeclarativeManifestImageVersionsProvider) } + @Test + fun `versions are rolled back if the image in the db is no longer found in docker hub`() { + val olderVersion1 = DeclarativeManifestImageVersion(1, "1.0.0", testSha) + val laterVersion1 = DeclarativeManifestImageVersion(1, "1.0.1", testSha) + every { mDeclarativeManifestImageVersionService.listDeclarativeManifestImageVersions() } returns listOf(laterVersion1) + every { mDeclarativeManifestImageVersionsProvider.getLatestDeclarativeManifestImageVersions() } returns listOf(olderVersion1) + + declarativeSourceUpdater.apply() + + verify(exactly = 1) { mDeclarativeManifestImageVersionsProvider.getLatestDeclarativeManifestImageVersions() } + verify(exactly = 1) { mDeclarativeManifestImageVersionService.listDeclarativeManifestImageVersions() } + verify(exactly = 1) { mDeclarativeManifestImageVersionService.writeDeclarativeManifestImageVersion(olderVersion1) } + verify(exactly = 1) { mActorDefinitionService.updateDeclarativeActorDefinitionVersions("1.0.1", "1.0.0") } + confirmVerified(mDeclarativeManifestImageVersionService, mActorDefinitionService, mDeclarativeManifestImageVersionsProvider) + } + @Test fun `same declarative manifest versions do not result in any calls to actor definition service`() { val oldVersion0 = 
DeclarativeManifestImageVersion(0, "0.1.0", testSha) @@ -123,4 +146,14 @@ internal class DeclarativeSourceUpdaterTest { verify(exactly = 0) { mActorDefinitionService.updateDeclarativeActorDefinitionVersions("1.0.0", "1.0.1") } confirmVerified(mDeclarativeManifestImageVersionService, mActorDefinitionService, mDeclarativeManifestImageVersionsProvider) } + + @Test + fun `feature flag should turn off behavior`() { + every { featureFlagClient.boolVariation(RunDeclarativeSourcesUpdater, Workspace(ANONYMOUS)) } returns false + declarativeSourceUpdater.apply() + + verify(exactly = 0) { mDeclarativeManifestImageVersionsProvider.getLatestDeclarativeManifestImageVersions() } + verify(exactly = 0) { mDeclarativeManifestImageVersionService.listDeclarativeManifestImageVersions() } + confirmVerified(mDeclarativeManifestImageVersionService, mActorDefinitionService, mDeclarativeManifestImageVersionsProvider) + } } diff --git a/airbyte-config/specs/build.gradle.kts b/airbyte-config/specs/build.gradle.kts index b3b735aa7c7..2a53e5e8959 100644 --- a/airbyte-config/specs/build.gradle.kts +++ b/airbyte-config/specs/build.gradle.kts @@ -7,8 +7,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(libs.bundles.micronaut.annotation.processor) api(libs.bundles.micronaut.annotation) @@ -35,19 +33,21 @@ dependencies { airbyte { spotless { - excludes = listOf( - "src/main/resources/seed/oss_registry.json", - "src/main/resources/seed/local_oss_registry.json", - ) + excludes = + listOf( + "src/main/resources/seed/oss_registry.json", + "src/main/resources/seed/local_oss_registry.json", + ) } } -val downloadConnectorRegistry = tasks.register("downloadConnectorRegistry") { - src("https://connectors.airbyte.com/files/registries/v0/oss_registry.json") - dest(File(projectDir, "src/main/resources/seed/local_oss_registry.json")) - overwrite(true) - onlyIfModified(true) -} +val downloadConnectorRegistry = + tasks.register("downloadConnectorRegistry") { + src("https://connectors.airbyte.com/files/registries/v0/oss_registry.json") + dest(File(projectDir, "src/main/resources/seed/local_oss_registry.json")) + overwrite(true) + onlyIfModified(true) + } tasks.processResources { dependsOn(downloadConnectorRegistry) diff --git a/airbyte-connector-builder-resources/CDK_VERSION b/airbyte-connector-builder-resources/CDK_VERSION index 5e3ec53ea67..6b9255cf0e6 100644 --- a/airbyte-connector-builder-resources/CDK_VERSION +++ b/airbyte-connector-builder-resources/CDK_VERSION @@ -1 +1 @@ -6.5.2 +6.9.2 diff --git a/airbyte-connector-builder-server/Dockerfile b/airbyte-connector-builder-server/Dockerfile index 022ccf8141e..75d06c5a472 100644 --- a/airbyte-connector-builder-server/Dockerfile +++ b/airbyte-connector-builder-server/Dockerfile @@ -2,7 +2,7 @@ ARG JAVA_PYTHON_BASE_IMAGE_VERSION=2.2.1 FROM airbyte/airbyte-base-java-python-image:${JAVA_PYTHON_BASE_IMAGE_VERSION} AS connector-builder-server # Set up CDK requirements -ARG CDK_VERSION=6.5.2 +ARG CDK_VERSION=6.9.2 ENV CDK_PYTHON=${PYENV_ROOT}/versions/${PYTHON_VERSION}/bin/python ENV CDK_ENTRYPOINT ${PYENV_ROOT}/versions/${PYTHON_VERSION}/lib/python3.10/site-packages/airbyte_cdk/connector_builder/main.py # Set up CDK diff --git a/airbyte-connector-builder-server/requirements.in b/airbyte-connector-builder-server/requirements.in index 3e8ebade345..ae9573228eb 100644 --- a/airbyte-connector-builder-server/requirements.in +++ b/airbyte-connector-builder-server/requirements.in @@ -1 +1 @@ 
-airbyte-cdk==6.5.2 +airbyte-cdk==6.9.2 diff --git a/airbyte-connector-builder-server/requirements.txt b/airbyte-connector-builder-server/requirements.txt index d29b16ede32..d17b299b080 100644 --- a/airbyte-connector-builder-server/requirements.txt +++ b/airbyte-connector-builder-server/requirements.txt @@ -4,13 +4,13 @@ # # pip-compile # -airbyte-cdk==6.5.2 +airbyte-cdk==6.9.2 # via -r requirements.in -airbyte-protocol-models-dataclasses==0.13.1 +airbyte-protocol-models-dataclasses==0.14.1 # via airbyte-cdk annotated-types==0.7.0 # via pydantic -anyio==4.6.2.post1 +anyio==4.7.0 # via httpx attributes-doc==0.4.0 # via serpyco-rs @@ -38,23 +38,23 @@ charset-normalizer==3.4.0 # via requests click==8.1.7 # via nltk -cryptography==42.0.8 - # via airbyte-cdk -deprecated==1.2.14 +cryptography==43.0.3 # via airbyte-cdk dpath==2.2.0 # via airbyte-cdk +dunamai==1.23.0 + # via airbyte-cdk exceptiongroup==1.2.2 # via # anyio # cattrs -genson==1.2.2 +genson==1.3.0 # via airbyte-cdk h11==0.14.0 # via httpcore -httpcore==1.0.6 +httpcore==1.0.7 # via httpx -httpx==0.27.2 +httpx==0.28.1 # via langsmith idna==3.10 # via @@ -73,26 +73,28 @@ jsonpointer==3.0.0 # via jsonpatch jsonref==0.2 # via airbyte-cdk -jsonschema==3.2.0 +jsonschema==4.17.3 # via airbyte-cdk langchain-core==0.1.42 # via airbyte-cdk -langsmith==0.1.142 +langsmith==0.1.147 # via langchain-core markupsafe==3.0.2 # via jinja2 -nltk==3.8.1 +nltk==3.9.1 # via airbyte-cdk numpy==1.26.4 # via # airbyte-cdk # pandas -orjson==3.10.11 +orjson==3.10.12 # via # airbyte-cdk # langsmith packaging==23.2 - # via langchain-core + # via + # dunamai + # langchain-core pandas==2.2.2 # via airbyte-cdk pendulum==2.1.2 @@ -103,14 +105,14 @@ psutil==6.1.0 # via airbyte-cdk pycparser==2.22 # via cffi -pydantic==2.9.2 +pydantic==2.10.3 # via # airbyte-cdk # langchain-core # langsmith -pydantic-core==2.23.4 +pydantic-core==2.27.1 # via pydantic -pyjwt==2.9.0 +pyjwt==2.10.1 # via airbyte-cdk pyrate-limiter==3.1.1 # via airbyte-cdk @@ -121,6 +123,8 @@ python-dateutil==2.9.0.post0 # airbyte-cdk # pandas # pendulum +python-ulid==3.0.0 + # via airbyte-cdk pytz==2024.1 # via # airbyte-cdk @@ -131,6 +135,8 @@ pyyaml==6.0.2 # via # airbyte-cdk # langchain-core +rapidfuzz==3.10.1 + # via airbyte-cdk regex==2024.11.6 # via nltk requests==2.32.3 @@ -145,19 +151,16 @@ requests-toolbelt==1.0.0 # via langsmith serpyco-rs==1.11.0 # via airbyte-cdk -six==1.16.0 +six==1.17.0 # via # isodate - # jsonschema # python-dateutil # url-normalize sniffio==1.3.1 - # via - # anyio - # httpx + # via anyio tenacity==8.5.0 # via langchain-core -tqdm==4.67.0 +tqdm==4.67.1 # via nltk typing-extensions==4.12.2 # via @@ -174,12 +177,7 @@ urllib3==2.2.3 # via # requests # requests-cache -wcmatch==8.4 +wcmatch==10.0 # via airbyte-cdk -wrapt==1.16.0 - # via deprecated xmltodict==0.13.0 # via airbyte-cdk - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/airbyte-connector-builder-server/src/main/resources/application.yml b/airbyte-connector-builder-server/src/main/resources/application.yml index e514790675a..d0bc10099db 100644 --- a/airbyte-connector-builder-server/src/main/resources/application.yml +++ b/airbyte-connector-builder-server/src/main/resources/application.yml @@ -85,6 +85,13 @@ airbyte: docker-mount: ${WORKSPACE_DOCKER_MOUNT:} root: ${WORKSPACE_ROOT} + # minimal configuration to authenticate internal service accounts w/ keycloak + keycloak: + host: ${KEYCLOAK_HOST:} + protocol: ${KEYCLOAK_PROTOCOL:http} + base-path: 
${KEYCLOAK_BASE_PATH:`/auth`} + internal-realm: ${KEYCLOAK_INTERNAL_REALM:_airbyte-internal} + endpoints: beans: enabled: true diff --git a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutClient.kt b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutClient.kt index eac457bc1a6..258505b50df 100644 --- a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutClient.kt +++ b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutClient.kt @@ -62,7 +62,7 @@ class ConnectorRolloutClient throw RuntimeException("Rollout ID is required to start a rollout workflow") } - val workflowId = getWorkflowId(input.dockerRepository, input.dockerImageTag, input.actorDefinitionId) + val workflowId = getWorkflowId(input.dockerRepository, input.dockerImageTag, input.connectorRollout!!.actorDefinitionId) val workflowStub = workflowClient.getClient().newWorkflowStub( ConnectorRolloutWorkflow::class.java, diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/RolloutActorFinder.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/RolloutActorFinder.kt index 31e54c5badb..4e9780e6639 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/RolloutActorFinder.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/RolloutActorFinder.kt @@ -14,8 +14,10 @@ import io.airbyte.config.StandardSync import io.airbyte.data.exceptions.ConfigNotFoundException import io.airbyte.data.helpers.ActorDefinitionVersionUpdater import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.CustomerTier import io.airbyte.data.services.DestinationService import io.airbyte.data.services.JobService +import io.airbyte.data.services.OrganizationCustomerAttributesService import io.airbyte.data.services.ScopedConfigurationService import io.airbyte.data.services.SourceService import io.airbyte.data.services.shared.ConfigScopeMapWithId @@ -53,6 +55,7 @@ class RolloutActorFinder( private val scopedConfigurationService: ScopedConfigurationService, private val sourceService: SourceService, private val destinationService: DestinationService, + private val organizationCustomerAttributesService: OrganizationCustomerAttributesService, ) { fun getActorSelectionInfo( connectorRollout: ConnectorRollout, @@ -88,7 +91,7 @@ class RolloutActorFinder( logger.info { "Rollout ${connectorRollout.id}: $nEligibleOrAlreadyPinned including eligible & already pinned to the release candidate" } logger.info { "Rollout ${connectorRollout.id}: ${nEligibleOrAlreadyPinned - candidates.size - nPreviouslyPinned} pinned to a non-RC" } - if (targetPercent == null) { + if (targetPercent == null || targetPercent == 0) { return ActorSelectionInfo( actorIdsToPin = emptyList(), nActors = initialNCandidates, @@ -100,13 +103,20 @@ class RolloutActorFinder( // Calculate the number to pin based on the input percentage val targetTotalToPin = ceil(nEligibleOrAlreadyPinned * targetPercent / 100.0).toInt() + val filteredActorDefinitionConnections = filterByConnectionActorId(candidates, sortedActorDefinitionConnections, actorType) + logger.info { + "Rollout ${connectorRollout.id}: " + + "candidates.size=$candidates.size " + + "sortedActorDefinitionConnections.size=${sortedActorDefinitionConnections.size} " + + 
"filteredActorDefinitionConnections.size=${filteredActorDefinitionConnections.size}" + } // From the eligible actors, choose the ones with the next sync // TODO: filter out those with lots of data // TODO: prioritize internal connections val actorIdsToPin = getUniqueActorIds( - sortedActorDefinitionConnections.filter { candidates.map { it.id }.contains(it.sourceId ?: it.destinationId) }, + filteredActorDefinitionConnections, targetTotalToPin - nPreviouslyPinned, actorType, ) @@ -130,6 +140,25 @@ class RolloutActorFinder( ) } + @VisibleForTesting + internal fun filterByConnectionActorId( + candidates: Collection, + sortedActorDefinitionConnections: List, + actorType: ActorType, + ): List { + val candidateIds = candidates.map { it.id }.toSet() + + return sortedActorDefinitionConnections.filter { connection -> + val relevantId = + if (actorType == ActorType.SOURCE) { + connection.sourceId + } else { + connection.destinationId + } + candidateIds.contains(relevantId) + } + } + fun getSyncInfoForPinnedActors(connectorRollout: ConnectorRollout): Map { val actorType = getActorType(connectorRollout.actorDefinitionId) @@ -258,26 +287,15 @@ class RolloutActorFinder( @VisibleForTesting fun filterByTier(candidates: Collection): Collection { - // TODO - filter out the ineligible actors (workspace in the list of tier 0/1 customers) - // Query from https://airbytehq-team.slack.com/archives/C06AZD64PDJ/p1727885479202429?thread_ts=1727885477.845219&cid=C06AZD64PDJ - - // val priorityWorkspaces = - // SELECT - // w.workspace_id, - // w.account_id, - // sc.customer_tier - // FROM - // airbyte-data-prod.airbyte_warehouse.workspace w - // JOIN - // airbyte-data-prod.airbyte_warehouse.support_case sc - // ON - // w.account_id = sc.account_id - // WHERE - // lower(sc.customer_tier) IN ('tier 1', 'tier 0') - // - // candidates = candidates.filter { - // !priorityWorkspaces.contains(it.workspaceId) - // } - return candidates + val organizationTiers = organizationCustomerAttributesService.getOrganizationTiers() + logger.debug { "RolloutActorFinder.filterByTier: organizationTiers=$organizationTiers" } + return candidates.filter { candidate -> + val organizationId = candidate.scopeMap[ConfigScopeType.ORGANIZATION] + // Include the candidate if the organization ID is not in the map or if the CustomerTier is not TIER_0 or TIER_1 + organizationId == null || organizationTiers[organizationId]?.let { tier -> + tier != CustomerTier.TIER_0 && tier != CustomerTier.TIER_1 + } ?: true + } } @VisibleForTesting @@ -339,7 +357,7 @@ class RolloutActorFinder( ) logger.info { "getSortedActorDefinitionConnections connections=${connections.size}" } for (connection in connections) { - logger.info { "getSortedActorDefinitionConnections connection sourceId=${connection.sourceId} destId=${connection.destinationId}" } + logger.debug { "getSortedActorDefinitionConnections connection sourceId=${connection.sourceId} destId=${connection.destinationId}" } } val sortedSyncs = @@ -356,7 +374,7 @@ class RolloutActorFinder( logger.info { "Connector rollout sorted actor definition connections: sortedSyncs.size=${sortedSyncs.size}" } for (sync in sortedSyncs) { - logger.info { "getSortedActorDefinitionConnections sorted sourceId=${sync.sourceId} destId=${sync.destinationId}" } + logger.debug { "getSortedActorDefinitionConnections sorted sourceId=${sync.sourceId} destId=${sync.destinationId}" } } return sortedSyncs } diff --git 
a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt index 45f945df683..811b428abbc 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt @@ -15,4 +15,5 @@ data class ConnectorRolloutActivityInputFinalize( var failedReason: String? = null, var updatedBy: UUID? = null, var rolloutStrategy: ConnectorEnumRolloutStrategy? = null, + var retainPinsOnCancellation: Boolean = true, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt index 6d3f371297b..7381f1e012f 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt @@ -1,6 +1,9 @@ package io.airbyte.connector.rollout.shared.models +import io.airbyte.api.model.generated.ConnectorRolloutActorSelectionInfo +import io.airbyte.api.model.generated.ConnectorRolloutActorSyncInfo import io.airbyte.config.ConnectorEnumRolloutStrategy +import io.airbyte.config.ConnectorRollout import java.util.UUID data class ConnectorRolloutActivityInputStart( @@ -10,4 +13,11 @@ data class ConnectorRolloutActivityInputStart( var rolloutId: UUID, var updatedBy: UUID? = null, var rolloutStrategy: ConnectorEnumRolloutStrategy? = null, + var initialVersionDockerImageTag: String? = null, + var connectorRollout: ConnectorRollout? = null, + var actorSelectionInfo: ConnectorRolloutActorSelectionInfo? = null, + var actorSyncs: List<ConnectorRolloutActorSyncInfo>? = null, + var initialRolloutPct: Int? = null, + var finalTargetRolloutPct: Int? = null, + var migratePins: Boolean? = true, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutOutput.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutOutput.kt index 6ba75009287..9ceecef62d2 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutOutput.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutOutput.kt @@ -1,5 +1,7 @@ package io.airbyte.connector.rollout.shared.models +import io.airbyte.api.model.generated.ConnectorRolloutActorSelectionInfo +import io.airbyte.api.model.generated.ConnectorRolloutActorSyncInfo import io.airbyte.config.ConnectorEnumRolloutState import io.airbyte.config.ConnectorEnumRolloutStrategy import java.time.OffsetDateTime @@ -25,5 +27,6 @@ data class ConnectorRolloutOutput( var expiresAt: OffsetDateTime? = null, var errorMsg: String? = null, var failedReason: String? = null, - var actorIds: List? = null, + var actorSelectionInfo: ConnectorRolloutActorSelectionInfo? = null, + var actorSyncs: List<ConnectorRolloutActorSyncInfo>?
= null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/temporal/ConnectorRolloutActivityHelpers.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/temporal/ConnectorRolloutActivityHelpers.kt index 260753f2a17..fc4a78ef61a 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/temporal/ConnectorRolloutActivityHelpers.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/temporal/ConnectorRolloutActivityHelpers.kt @@ -33,7 +33,6 @@ object ConnectorRolloutActivityHelpers { expiresAt = rolloutRead.expiresAt, errorMsg = rolloutRead.errorMsg, failedReason = rolloutRead.failedReason, - actorIds = ArrayList(), ) } diff --git a/airbyte-connector-rollout-shared/src/test/kotlin/io/airbyte/connector/rollout/shared/RolloutActorFinderTest.kt b/airbyte-connector-rollout-shared/src/test/kotlin/io/airbyte/connector/rollout/shared/RolloutActorFinderTest.kt index 5bc7ddb39d3..d88a2f4d512 100644 --- a/airbyte-connector-rollout-shared/src/test/kotlin/io/airbyte/connector/rollout/shared/RolloutActorFinderTest.kt +++ b/airbyte-connector-rollout-shared/src/test/kotlin/io/airbyte/connector/rollout/shared/RolloutActorFinderTest.kt @@ -20,8 +20,10 @@ import io.airbyte.config.StandardSync import io.airbyte.data.exceptions.ConfigNotFoundException import io.airbyte.data.helpers.ActorDefinitionVersionUpdater import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.CustomerTier import io.airbyte.data.services.DestinationService import io.airbyte.data.services.JobService +import io.airbyte.data.services.OrganizationCustomerAttributesService import io.airbyte.data.services.ScopedConfigurationService import io.airbyte.data.services.SourceService import io.airbyte.data.services.shared.ConfigScopeMapWithId @@ -52,6 +54,7 @@ class RolloutActorFinderTest { private val scopedConfigurationService = mockk<ScopedConfigurationService>() private val sourceService = mockk<SourceService>() private val destinationService = mockk<DestinationService>() + private val organizationCustomerAttributesService = mockk<OrganizationCustomerAttributesService>() private val rolloutActorFinder = RolloutActorFinder( actorDefinitionVersionUpdater, @@ -60,6 +63,7 @@ class RolloutActorFinderTest { scopedConfigurationService, sourceService, destinationService, + organizationCustomerAttributesService, ) companion object { @@ -261,6 +265,7 @@ class RolloutActorFinderTest { } returns CONFIG_SCOPE_MAP.map { it.key }.toSet() every { connectionService.listConnectionsByActorDefinitionIdAndType(any(), any(), any()) } returns MOCK_CONNECTION_SYNCS every { jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) } returns JOBS + every { organizationCustomerAttributesService.getOrganizationTiers() } returns emptyMap() val actorSelectionInfo = rolloutActorFinder.getActorSelectionInfo(createMockConnectorRollout(actorDefinitionId), TARGET_PERCENTAGE) @@ -275,6 +280,7 @@ class RolloutActorFinderTest { actorDefinitionVersionUpdater.getUpgradeCandidates(any(), any()) connectionService.listConnectionsByActorDefinitionIdAndType(any(), any(), any()) jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) + organizationCustomerAttributesService.getOrganizationTiers() } if (actorDefinitionId == SOURCE_ACTOR_DEFINITION_ID) { @@ -306,6 +312,7 @@ class RolloutActorFinderTest { } returns CONFIG_SCOPE_MAP.map { it.key }.toSet() every { connectionService.listConnectionsByActorDefinitionIdAndType(any(), any(), any()) } returns
MOCK_CONNECTION_SYNCS every { jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) } returns JOBS + every { organizationCustomerAttributesService.getOrganizationTiers() } returns emptyMap() val actorSelectionInfo = rolloutActorFinder.getActorSelectionInfo(createMockConnectorRollout(actorDefinitionId), null) @@ -320,6 +327,7 @@ class RolloutActorFinderTest { actorDefinitionVersionUpdater.getUpgradeCandidates(any(), any()) connectionService.listConnectionsByActorDefinitionIdAndType(any(), any(), any()) jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) + organizationCustomerAttributesService.getOrganizationTiers() } assertEquals(0, actorSelectionInfo.actorIdsToPin.size) @@ -562,6 +570,7 @@ class RolloutActorFinderTest { every { scopedConfigurationService.listScopedConfigurationsWithValues(any(), any(), any(), any(), any(), any()) } returns listOf() every { connectionService.listConnectionsByActorDefinitionIdAndType(any(), any(), any()) } returns MOCK_CONNECTION_SYNCS every { jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) } returns JOBS + every { organizationCustomerAttributesService.getOrganizationTiers() } returns emptyMap() val actorSelectionInfo = rolloutActorFinder.getActorSelectionInfo(createMockConnectorRollout(actorDefinitionId), 1) @@ -576,6 +585,7 @@ class RolloutActorFinderTest { scopedConfigurationService.listScopedConfigurationsWithValues(any(), any(), any(), any(), any(), any()) connectionService.listConnectionsByActorDefinitionIdAndType(any(), any(), any()) jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) + organizationCustomerAttributesService.getOrganizationTiers() } assertEquals(1, actorSelectionInfo.actorIdsToPin.size) @@ -632,6 +642,7 @@ class RolloutActorFinderTest { every { scopedConfigurationService.getScopedConfigurations(any(), any(), any(), any()) } returns mapOf() every { connectionService.listConnectionsByActorDefinitionIdAndType(any(), any(), any()) } returns MOCK_CONNECTION_SYNCS every { jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) } returns JOBS + every { organizationCustomerAttributesService.getOrganizationTiers() } returns emptyMap() val actorSelectionInfo = rolloutActorFinder.getActorSelectionInfo(createMockConnectorRollout(actorDefinitionId), 1) @@ -646,6 +657,7 @@ class RolloutActorFinderTest { scopedConfigurationService.listScopedConfigurationsWithValues(any(), any(), any(), any(), any(), any()) connectionService.listConnectionsByActorDefinitionIdAndType(any(), any(), any()) jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) + organizationCustomerAttributesService.getOrganizationTiers() } // We already exceed the target percentage so shouldn't pin something new @@ -699,9 +711,37 @@ class RolloutActorFinderTest { @ParameterizedTest @MethodSource("actorDefinitionIds") - fun `test filterByTier is a no-op`(actorDefinitionId: UUID) { - // This is not currently being used - assertEquals(CONFIG_SCOPE_MAP.values, rolloutActorFinder.filterByTier(CONFIG_SCOPE_MAP.values)) + fun `test filterByTier excludes organizations listed as tier 0 or 1`(actorDefinitionId: UUID) { + val organizationTiers = + mapOf( + ORGANIZATION_ID_1 to CustomerTier.TIER_0, + ORGANIZATION_ID_2 to CustomerTier.TIER_2, + ) + every { organizationCustomerAttributesService.getOrganizationTiers() } returns organizationTiers + + val candidates = + listOf( + ConfigScopeMapWithId( + id = 
UUID.randomUUID(), + scopeMap = mapOf(ConfigScopeType.ORGANIZATION to ORGANIZATION_ID_1), + ), + ConfigScopeMapWithId( + id = UUID.randomUUID(), + scopeMap = mapOf(ConfigScopeType.ORGANIZATION to ORGANIZATION_ID_2), + ), + ConfigScopeMapWithId( + id = UUID.randomUUID(), + scopeMap = mapOf(ConfigScopeType.ORGANIZATION to null), + ), + ) + + val filteredCandidates = rolloutActorFinder.filterByTier(candidates) + + assertEquals(2, filteredCandidates.size) + assertTrue(filteredCandidates.any { it.scopeMap[ConfigScopeType.ORGANIZATION] == ORGANIZATION_ID_2 }) + assertTrue(filteredCandidates.any { it.scopeMap[ConfigScopeType.ORGANIZATION] == null }) + + verify { organizationCustomerAttributesService.getOrganizationTiers() } } @ParameterizedTest @@ -1083,6 +1123,145 @@ class RolloutActorFinderTest { verify { jobService.listJobs(any(), any(), any(), any(), any(), any(), any(), any(), any()) } } + @Test + fun `getIdFromConnection filters connections using sourceId when ActorType is SOURCE`() { + val candidates = + listOf( + mapOf( + ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_SOURCE to + ConfigScopeMapWithId( + id = ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_SOURCE, + scopeMap = + mapOf( + ConfigScopeType.ACTOR to ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_SOURCE, + ConfigScopeType.WORKSPACE to ORGANIZATION_1_WORKSPACE_ID_1, + ConfigScopeType.ORGANIZATION to ORGANIZATION_ID_1, + ), + ), + ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_SOURCE to + ConfigScopeMapWithId( + id = ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_SOURCE, + scopeMap = + mapOf( + ConfigScopeType.ACTOR to ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_SOURCE, + ConfigScopeType.WORKSPACE to ORGANIZATION_1_WORKSPACE_ID_1, + ConfigScopeType.ORGANIZATION to ORGANIZATION_ID_1, + ), + ), + ), + ).flatMap { it.values } + + val connections = + listOf( + StandardSync().apply { + sourceId = ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_SOURCE + destinationId = ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION + }, + StandardSync().apply { + sourceId = ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_SOURCE + destinationId = ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION + }, + ) + + val result = rolloutActorFinder.filterByConnectionActorId(candidates, connections, ActorType.SOURCE) + + assertEquals(2, result.size) + assertEquals(ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_SOURCE, result[0].sourceId) + assertEquals(ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_SOURCE, result[1].sourceId) + } + + @Test + fun `getIdFromConnection filters connections using destinationId when ActorType is DESTINATION`() { + val candidates = + listOf( + mapOf( + ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION to + ConfigScopeMapWithId( + id = ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION, + scopeMap = + mapOf( + ConfigScopeType.ACTOR to ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION, + ConfigScopeType.WORKSPACE to ORGANIZATION_1_WORKSPACE_ID_1, + ConfigScopeType.ORGANIZATION to ORGANIZATION_ID_1, + ), + ), + ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION to + ConfigScopeMapWithId( + id = ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION, + scopeMap = + mapOf( + ConfigScopeType.ACTOR to ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION, + ConfigScopeType.WORKSPACE to ORGANIZATION_1_WORKSPACE_ID_1, + ConfigScopeType.ORGANIZATION to ORGANIZATION_ID_1, + ), + ), + ), + ).flatMap { it.values } + + val connections = + listOf( + StandardSync().apply { + sourceId = ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_SOURCE + destinationId = ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION + }, + StandardSync().apply { + sourceId = ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_SOURCE + 
destinationId = ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION + }, + ) + + val result = rolloutActorFinder.filterByConnectionActorId(candidates, connections, ActorType.DESTINATION) + + assertEquals(2, result.size) + assertEquals(ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION, result[0].destinationId) + assertEquals(ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION, result[1].destinationId) + } + + @Test + fun `getIdFromConnection returns empty list if no matches are found`() { + val candidates = + listOf( + mapOf( + ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION to + ConfigScopeMapWithId( + id = ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION, + scopeMap = + mapOf( + ConfigScopeType.ACTOR to ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_DESTINATION, + ConfigScopeType.WORKSPACE to ORGANIZATION_1_WORKSPACE_ID_1, + ConfigScopeType.ORGANIZATION to ORGANIZATION_ID_1, + ), + ), + ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION to + ConfigScopeMapWithId( + id = ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION, + scopeMap = + mapOf( + ConfigScopeType.ACTOR to ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_DESTINATION, + ConfigScopeType.WORKSPACE to ORGANIZATION_1_WORKSPACE_ID_1, + ConfigScopeType.ORGANIZATION to ORGANIZATION_ID_1, + ), + ), + ), + ).flatMap { it.values } + + val connections = + listOf( + StandardSync().apply { + sourceId = ORGANIZATION_1_WORKSPACE_1_ACTOR_ID_SOURCE + destinationId = ORGANIZATION_2_WORKSPACE_1_ACTOR_ID_DESTINATION + }, + StandardSync().apply { + sourceId = ORGANIZATION_1_WORKSPACE_2_ACTOR_ID_SOURCE + destinationId = ORGANIZATION_2_WORKSPACE_2_ACTOR_ID_DESTINATION + }, + ) + + val result = rolloutActorFinder.filterByConnectionActorId(candidates, connections, ActorType.DESTINATION) + + assertEquals(0, result.size) + } + @Test fun `test getUniqueActorIds with fewer actors than nActorsToPin`() { val sortedConnections = diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImpl.kt index 486aafbb019..a73d9b38967 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImpl.kt @@ -32,6 +32,9 @@ import io.temporal.failure.ApplicationFailure import io.temporal.workflow.Workflow import java.lang.reflect.Field import java.time.Duration +import java.time.Instant +import java.time.OffsetDateTime +import java.time.ZoneOffset private val logger = KotlinLogging.logger {} @@ -107,21 +110,73 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { ) private var startRolloutFailed = false - private var state = ConnectorEnumRolloutState.INITIALIZED + private var connectorRollout: ConnectorRolloutOutput? 
= null override fun run(input: ConnectorRolloutActivityInputStart): ConnectorEnumRolloutState { - val workflowId = "${input.dockerRepository}:${input.dockerImageTag}:${input.actorDefinitionId.toString().substring(0, 8)}" - logger.info { "Initialized rollout for $workflowId" } + val workflowId = Workflow.getInfo().workflowId + + // Checkpoint to record the workflow version + Workflow.getVersion("ChangedActivityInputStart", Workflow.DEFAULT_VERSION, 1) + + setRollout(input) + // End the workflow if we were unable to start the rollout, or we've reached a terminal state - Workflow.await { startRolloutFailed || ConnectorRolloutFinalState.entries.any { it.value() == state.value() } } + Workflow.await { startRolloutFailed || rolloutStateIsTerminal() } if (startRolloutFailed) { throw ApplicationFailure.newFailure( "Failure starting rollout for $workflowId", ConnectorEnumRolloutState.CANCELED.value(), ) } - logger.info { "Rollout for $workflowId has reached a terminal state: $state" } - return state + logger.info { "Rollout for $workflowId has reached a terminal state: ${connectorRollout?.state}" } + + return getRolloutState() + } + + private fun setRollout(input: ConnectorRolloutActivityInputStart) { + connectorRollout = + ConnectorRolloutOutput( + id = input.connectorRollout?.id, + workflowRunId = input.connectorRollout?.workflowRunId, + actorDefinitionId = input.connectorRollout?.actorDefinitionId, + releaseCandidateVersionId = input.connectorRollout?.releaseCandidateVersionId, + initialVersionId = input.connectorRollout?.initialVersionId, + // In Workflow.DEFAULT_VERSION, input.connectorRollout doesn't exist, and we only store the `state` variable. + // Therefore, we require it to be non-null here. + // Once all DEFAULT_VERSION workflows are finished, we can delete the null branch. + state = input.connectorRollout?.state ?: ConnectorEnumRolloutState.INITIALIZED, + initialRolloutPct = input.connectorRollout?.initialRolloutPct?.toInt(), + currentTargetRolloutPct = input.connectorRollout?.currentTargetRolloutPct?.toInt(), + finalTargetRolloutPct = input.connectorRollout?.finalTargetRolloutPct?.toInt(), + hasBreakingChanges = false, + rolloutStrategy = input.connectorRollout?.rolloutStrategy, + maxStepWaitTimeMins = input.connectorRollout?.maxStepWaitTimeMins?.toInt(), + updatedBy = input.connectorRollout?.updatedBy.toString(), + createdAt = getOffset(input.connectorRollout?.createdAt), + updatedAt = getOffset(input.connectorRollout?.updatedAt), + completedAt = getOffset(input.connectorRollout?.completedAt), + expiresAt = getOffset(input.connectorRollout?.expiresAt), + errorMsg = input.connectorRollout?.errorMsg, + failedReason = input.connectorRollout?.failedReason, + actorSelectionInfo = input.actorSelectionInfo, + actorSyncs = input.actorSyncs, + ) + } + + private fun getOffset(timestamp: Long?): OffsetDateTime? 
{ + return if (timestamp == null) { + null + } else { + Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC) + } + } + + private fun getRolloutState(): ConnectorEnumRolloutState { + return connectorRollout?.state ?: ConnectorEnumRolloutState.INITIALIZED + } + + private fun rolloutStateIsTerminal(): Boolean { + return ConnectorRolloutFinalState.entries.any { it.value() == getRolloutState().value() } } override fun startRollout(input: ConnectorRolloutActivityInputStart): ConnectorRolloutOutput { @@ -130,7 +185,7 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { return try { val output = startRolloutActivity.startRollout(workflowRunId, input) logger.info { "startRolloutActivity.startRollout" } - state = output.state + connectorRollout = output output } catch (e: Exception) { val newState = ConnectorEnumRolloutState.CANCELED @@ -142,6 +197,7 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { actorDefinitionId = input.actorDefinitionId, rolloutId = input.rolloutId, errorMsg = "Failed to start rollout.", + failureMsg = e.message, updatedBy = input.updatedBy, rolloutStrategy = input.rolloutStrategy, ), @@ -175,7 +231,7 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { override fun getRollout(input: ConnectorRolloutActivityInputGet): ConnectorRolloutOutput { logger.info { "getRollout: calling getRolloutActivity" } val output = getRolloutActivity.getRollout(input) - logger.info { "getRolloutActivity.getRollout pinned_actors = ${output.actorIds}" } + logger.info { "getRolloutActivity.getRollout = $output" } return output } @@ -189,8 +245,8 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { override fun doRollout(input: ConnectorRolloutActivityInputRollout): ConnectorRolloutOutput { logger.info { "doRollout: calling doRolloutActivity" } val output = doRolloutActivity.doRollout(input) - state = output.state - logger.info { "doRolloutActivity.doRollout pinned_connections = ${output.actorIds}" } + connectorRollout = output + logger.info { "doRolloutActivity.doRollout = $output" } return output } @@ -205,7 +261,7 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { // Start a GH workflow to make the release candidate available as `latest`, if the rollout was successful // Delete the release candidate on either success or failure (but not cancellation) if (input.result == ConnectorRolloutFinalState.SUCCEEDED || input.result == ConnectorRolloutFinalState.FAILED_ROLLED_BACK) { - if (state == ConnectorEnumRolloutState.FINALIZING) { + if (connectorRollout?.state == ConnectorEnumRolloutState.FINALIZING) { logger.info { "finalizeRollout: already promoted/rolled back, skipping; if you need to re-run the GHA please do so manually " } } else { logger.info { "finalizeRollout: calling promoteOrRollback" } @@ -225,7 +281,7 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { rolloutStrategy = input.rolloutStrategy, ), ) - state = output.state + connectorRollout = output } } @@ -250,11 +306,11 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { } } - // Unpin all actors that were pinned to the release candidate + // Mark the rollout as finalized, and unpin all actors that were pinned to the release candidate if appropriate logger.info { "finalizeRollout: calling finalizeRolloutActivity" } val rolloutResult = finalizeRolloutActivity.finalizeRollout(input) logger.info { "finalizeRolloutActivity.finalizeRollout rolloutResult = $rolloutResult" } - state = rolloutResult.state + connectorRollout = rolloutResult 
return rolloutResult } diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/ActivityHelper.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/ActivityHelper.kt index 5d5e0e549e5..c5f89d597cc 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/ActivityHelper.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/ActivityHelper.kt @@ -1,5 +1,7 @@ package io.airbyte.connector.rollout.worker.activities +import io.airbyte.api.client.model.generated.ConnectorRolloutStrategy +import io.airbyte.config.ConnectorEnumRolloutStrategy import io.airbyte.connector.rollout.shared.Constants import io.github.oshai.kotlinlogging.KotlinLogging import io.temporal.failure.ApplicationFailure @@ -28,3 +30,7 @@ fun handleAirbyteApiClientException(e: ClientException): Nothing { throw ApplicationFailure.newFailure(body, Constants.AIRBYTE_API_CLIENT_EXCEPTION) } + +fun getRolloutStrategyFromInput(rolloutStrategy: ConnectorEnumRolloutStrategy?): ConnectorRolloutStrategy { + return if (rolloutStrategy == null) ConnectorRolloutStrategy.MANUAL else ConnectorRolloutStrategy.valueOf(rolloutStrategy.toString().uppercase()) +} diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt index 9d9081a65f9..ade9d8c5f93 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt @@ -8,7 +8,6 @@ import io.airbyte.api.client.AirbyteApiClient import io.airbyte.api.client.generated.ConnectorRolloutApi import io.airbyte.api.client.model.generated.ConnectorRolloutRequestBody import io.airbyte.api.client.model.generated.ConnectorRolloutResponse -import io.airbyte.api.client.model.generated.ConnectorRolloutStrategy import io.airbyte.connector.rollout.shared.ConnectorRolloutActivityHelpers import io.airbyte.connector.rollout.shared.models.ConnectorRolloutActivityInputRollout import io.airbyte.connector.rollout.shared.models.ConnectorRolloutOutput @@ -33,7 +32,7 @@ class DoRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : Do val body = ConnectorRolloutRequestBody( input.rolloutId, - ConnectorRolloutStrategy.MANUAL, + getRolloutStrategyFromInput(input.rolloutStrategy), input.actorIds, input.targetPercentage, input.updatedBy, diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt index c52eccfd65b..1dad5aacb48 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt @@ -9,7 +9,6 @@ import io.airbyte.api.client.generated.ConnectorRolloutApi import io.airbyte.api.client.model.generated.ConnectorRolloutFinalizeRequestBody import io.airbyte.api.client.model.generated.ConnectorRolloutFinalizeResponse 
import io.airbyte.api.client.model.generated.ConnectorRolloutStateTerminal -import io.airbyte.api.client.model.generated.ConnectorRolloutStrategy import io.airbyte.config.ConnectorRolloutFinalState import io.airbyte.connector.rollout.shared.ConnectorRolloutActivityHelpers import io.airbyte.connector.rollout.shared.models.ConnectorRolloutActivityInputFinalize @@ -44,10 +43,11 @@ class FinalizeRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient ConnectorRolloutFinalizeRequestBody( input.rolloutId, state, - ConnectorRolloutStrategy.MANUAL, + getRolloutStrategyFromInput(input.rolloutStrategy), errorMsg, failureReason, input.updatedBy, + input.retainPinsOnCancellation, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt index cdc9b603034..08892a7e5db 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt @@ -8,7 +8,6 @@ import io.airbyte.api.client.AirbyteApiClient import io.airbyte.api.client.generated.ConnectorRolloutApi import io.airbyte.api.client.model.generated.ConnectorRolloutStartRequestBody import io.airbyte.api.client.model.generated.ConnectorRolloutStartResponse -import io.airbyte.api.client.model.generated.ConnectorRolloutStrategy import io.airbyte.connector.rollout.shared.ConnectorRolloutActivityHelpers import io.airbyte.connector.rollout.shared.models.ConnectorRolloutActivityInputStart import io.airbyte.connector.rollout.shared.models.ConnectorRolloutOutput @@ -37,8 +36,9 @@ class StartRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : ConnectorRolloutStartRequestBody( input.rolloutId, workflowRunId, - ConnectorRolloutStrategy.MANUAL, + getRolloutStrategyFromInput(input.rolloutStrategy), input.updatedBy, + true, ) return try { diff --git a/airbyte-connector-rollout-worker/src/test/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImplTest.kt b/airbyte-connector-rollout-worker/src/test/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImplTest.kt index c88bafec803..0c153fd00ba 100644 --- a/airbyte-connector-rollout-worker/src/test/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImplTest.kt +++ b/airbyte-connector-rollout-worker/src/test/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImplTest.kt @@ -154,6 +154,8 @@ class ConnectorRolloutWorkflowImplTest { ROLLOUT_ID, USER_ID, ROLLOUT_STRATEGY, + null, + null, ), ) } diff --git a/airbyte-connector-rollout-worker/src/test/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImplTest.kt b/airbyte-connector-rollout-worker/src/test/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImplTest.kt index 640a6979902..4d05f1f9bb5 100644 --- a/airbyte-connector-rollout-worker/src/test/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImplTest.kt +++ b/airbyte-connector-rollout-worker/src/test/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImplTest.kt @@ -26,6 +26,8 @@ class StartRolloutActivityImplTest { private val ROLLOUT_ID = UUID.randomUUID() private val USER_ID = UUID.randomUUID() private val ROLLOUT_STRATEGY = 
ConnectorEnumRolloutStrategy.MANUAL + private val INITIAL_ROLLOUT_PERCENT = 0 + private val FINAL_TARGET_ROLLOUT_PERCENT = 100 } @BeforeEach @@ -48,6 +50,29 @@ class StartRolloutActivityImplTest { rolloutId = ROLLOUT_ID, updatedBy = USER_ID, rolloutStrategy = ROLLOUT_STRATEGY, + initialRolloutPct = INITIAL_ROLLOUT_PERCENT, + finalTargetRolloutPct = FINAL_TARGET_ROLLOUT_PERCENT, + ) + + startRolloutActivity.startRollout("workflowRunId", input) + + verify { connectorRolloutApi.startConnectorRollout(any()) } + } + + @Test + fun `test startRollout calls connectorRolloutApi with null values`() { + every { connectorRolloutApi.startConnectorRollout(any()) } returns getMockConnectorRolloutResponse() + + val input = + ConnectorRolloutActivityInputStart( + dockerRepository = DOCKER_REPOSITORY, + dockerImageTag = DOCKER_IMAGE_TAG, + actorDefinitionId = ACTOR_DEFINITION_ID, + rolloutId = ROLLOUT_ID, + updatedBy = USER_ID, + rolloutStrategy = null, + initialRolloutPct = null, + finalTargetRolloutPct = null, ) startRolloutActivity.startRollout("workflowRunId", input) diff --git a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt index 6c943e1d721..635e8d794ce 100644 --- a/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt +++ b/airbyte-connector-sidecar/src/main/kotlin/io/airbyte/connectorSidecar/ConnectorWatcher.kt @@ -119,7 +119,7 @@ class ConnectorWatcher( logger.info { "Connector exited with exit code $exitCode" } val streamFactory = getStreamFactory(input.integrationLauncherConfig) - return when (input.operationType!!) { + return when (input.operationType) { SidecarInput.OperationType.CHECK -> connectorMessageProcessor.run( outputStream, @@ -176,7 +176,7 @@ class ConnectorWatcher( ) { logger.error(e) { "Error performing operation: ${e.javaClass.name}" } val connectorOutput = - when (input.operationType!!) 
{ + when (input.operationType) { SidecarInput.OperationType.CHECK -> getFailedOutput(input.checkConnectionInput, e) SidecarInput.OperationType.DISCOVER -> getFailedOutput(input.discoverCatalogInput, e) SidecarInput.OperationType.SPEC -> getFailedOutput(input.integrationLauncherConfig.dockerImage, e) @@ -187,14 +187,10 @@ class ConnectorWatcher( } @VisibleForTesting - fun readFile(fileName: String): String { - return Files.readString(Path.of(configDir, fileName)) - } + fun readFile(fileName: String): String = Files.readString(Path.of(configDir, fileName)) @VisibleForTesting - fun areNeededFilesPresent(): Boolean { - return Files.exists(outputPath) && Files.exists(Path.of(configDir, FileConstants.EXIT_CODE_FILE)) - } + fun areNeededFilesPresent(): Boolean = Files.exists(outputPath) && Files.exists(Path.of(configDir, FileConstants.EXIT_CODE_FILE)) @VisibleForTesting fun getStreamFactory(integrationLauncherConfig: IntegrationLauncherConfig): AirbyteStreamFactory { @@ -240,11 +236,11 @@ class ConnectorWatcher( @VisibleForTesting fun getFailedOutput( - input: StandardCheckConnectionInput, + input: StandardCheckConnectionInput?, e: Exception, ): ConnectorJobOutput { val failureOrigin = - if (input.actorType == ActorType.SOURCE) { + if (input?.actorType == ActorType.SOURCE) { FailureReason.FailureOrigin.SOURCE } else { FailureReason.FailureOrigin.DESTINATION @@ -270,13 +266,13 @@ class ConnectorWatcher( @VisibleForTesting fun getFailedOutput( - input: StandardDiscoverCatalogInput, + input: StandardDiscoverCatalogInput?, e: Exception, ): ConnectorJobOutput { val failureReason = FailureReason() .withFailureOrigin(FailureReason.FailureOrigin.SOURCE) - .withExternalMessage("The discover catalog failed due to an internal error for source: ${input.sourceId}") + .withExternalMessage("The discover catalog failed due to an internal error for source: ${input?.sourceId}") .withInternalMessage(e.message) .withStacktrace(e.stackTraceToString()) diff --git a/airbyte-connector-sidecar/src/main/resources/application.yml b/airbyte-connector-sidecar/src/main/resources/application.yml index 55b7c15a64c..e37b3fb8d0a 100644 --- a/airbyte-connector-sidecar/src/main/resources/application.yml +++ b/airbyte-connector-sidecar/src/main/resources/application.yml @@ -26,6 +26,7 @@ airbyte: state: ${STORAGE_BUCKET_STATE} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT} activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD} + audit-logging: ${STORAGE_BUCKET_AUDIT_LOGGING} azure: connection-string: ${AZURE_STORAGE_CONNECTION_STRING} gcs: diff --git a/airbyte-container-orchestrator/build.gradle.kts b/airbyte-container-orchestrator/build.gradle.kts index eced6c1bfbe..8e4813a0f80 100644 --- a/airbyte-container-orchestrator/build.gradle.kts +++ b/airbyte-container-orchestrator/build.gradle.kts @@ -27,8 +27,6 @@ plugins { val airbyteProtocol: Configuration by configurations.creating dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) ksp(libs.bundles.micronaut.annotation.processor) diff --git a/airbyte-container-orchestrator/src/main/resources/application.yml b/airbyte-container-orchestrator/src/main/resources/application.yml index 26589493196..9df5dfbecdd 100644 --- a/airbyte-container-orchestrator/src/main/resources/application.yml +++ b/airbyte-container-orchestrator/src/main/resources/application.yml @@ -64,6 +64,7 @@ airbyte: state: 
${STORAGE_BUCKET_STATE} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT} activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD} + audit-logging: ${STORAGE_BUCKET_AUDIT_LOGGING} azure: connection-string: ${AZURE_STORAGE_CONNECTION_STRING} gcs: diff --git a/airbyte-container-orchestrator/src/test/resources/application-test.yml b/airbyte-container-orchestrator/src/test/resources/application-test.yml index 2c468bfc6bc..645a68e59ca 100644 --- a/airbyte-container-orchestrator/src/test/resources/application-test.yml +++ b/airbyte-container-orchestrator/src/test/resources/application-test.yml @@ -33,6 +33,7 @@ airbyte: state: ${STORAGE_BUCKET_STATE:state} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT:workload-output} activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD:activity-payload} + audit-logging: ${STORAGE_BUCKET_AUDIT_LOGGING} gcs: application-credentials: ${GOOGLE_APPLICATION_CREDENTIALS:not-blank} minio: diff --git a/airbyte-cron/build.gradle.kts b/airbyte-cron/build.gradle.kts index e38a85d553a..bd81efaea51 100644 --- a/airbyte-cron/build.gradle.kts +++ b/airbyte-cron/build.gradle.kts @@ -5,8 +5,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) @@ -26,7 +24,6 @@ dependencies { implementation(libs.kotlin.logging) implementation(libs.okhttp) implementation(libs.sentry.java) - implementation(libs.lombok) implementation(libs.commons.io) implementation(project(":oss:airbyte-api:server-api")) @@ -82,8 +79,11 @@ tasks.withType().configureEach { // Copies the connector <> platform compatibility JSON file for use in tests tasks.register("copyPlatformCompatibilityMatrix") { - val platformCompatibilityFile = project.rootProject.layout.projectDirectory.file("tools/connectors/platform-compatibility/platform-compatibility.json") - if(file(platformCompatibilityFile).exists()) { + val platformCompatibilityFile = + project.rootProject.layout.projectDirectory.file( + "tools/connectors/platform-compatibility/platform-compatibility.json", + ) + if (file(platformCompatibilityFile).exists()) { from(platformCompatibilityFile) into(project.layout.projectDirectory.dir("src/test/resources")) } @@ -97,4 +97,4 @@ afterEvaluate { tasks.named("spotlessStyling") { dependsOn("copyPlatformCompatibilityMatrix") } -} \ No newline at end of file +} diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/jobs/DeclarativeSourcesUpdater.java b/airbyte-cron/src/main/java/io/airbyte/cron/jobs/DeclarativeSourcesUpdater.java index a4374189330..1b886193abc 100644 --- a/airbyte-cron/src/main/java/io/airbyte/cron/jobs/DeclarativeSourcesUpdater.java +++ b/airbyte-cron/src/main/java/io/airbyte/cron/jobs/DeclarativeSourcesUpdater.java @@ -5,9 +5,13 @@ package io.airbyte.cron.jobs; import static io.airbyte.cron.MicronautCronRunner.SCHEDULED_TRACE_OPERATION_NAME; +import static io.airbyte.featureflag.ContextKt.ANONYMOUS; import datadog.trace.api.Trace; import io.airbyte.config.init.DeclarativeSourceUpdater; +import io.airbyte.featureflag.FeatureFlagClient; +import io.airbyte.featureflag.RunDeclarativeSourcesUpdater; +import io.airbyte.featureflag.Workspace; import io.airbyte.metrics.lib.MetricAttribute; import io.airbyte.metrics.lib.MetricClient; import io.airbyte.metrics.lib.MetricTags; @@ -30,17 +34,25 @@ public class DeclarativeSourcesUpdater { private final DeclarativeSourceUpdater declarativeSourceUpdater; private final 
MetricClient metricClient; + private final FeatureFlagClient featureFlagClient; public DeclarativeSourcesUpdater(@Named("remoteDeclarativeSourceUpdater") final DeclarativeSourceUpdater declarativeSourceUpdater, - final MetricClient metricClient) { + final MetricClient metricClient, + final FeatureFlagClient featureFlagClient) { log.info("Creating declarative source updater"); this.declarativeSourceUpdater = declarativeSourceUpdater; this.metricClient = metricClient; + this.featureFlagClient = featureFlagClient; } @Trace(operationName = SCHEDULED_TRACE_OPERATION_NAME) @Scheduled(fixedRate = "10m") void updateDefinitions() { + if (!featureFlagClient.boolVariation(RunDeclarativeSourcesUpdater.INSTANCE, new Workspace(ANONYMOUS))) { + log.info("Declarative sources update feature flag is disabled. Skipping updating declarative sources."); + return; + } + log.info("Getting latest CDK versions and updating declarative sources..."); metricClient.count(OssMetricsRegistry.CRON_JOB_RUN_BY_CRON_TYPE, 1, new MetricAttribute(MetricTags.CRON_TYPE, "declarative_sources_updater")); declarativeSourceUpdater.apply(); diff --git a/airbyte-csp-check/build.gradle.kts b/airbyte-csp-check/build.gradle.kts index f0b5c8c8fad..351427818da 100644 --- a/airbyte-csp-check/build.gradle.kts +++ b/airbyte-csp-check/build.gradle.kts @@ -7,14 +7,14 @@ dependencies { ksp(platform(libs.micronaut.platform)) ksp(libs.bundles.micronaut.annotation.processor) - api(libs.bundles.micronaut.annotation) - api(libs.bundles.micronaut.kotlin) - api(libs.kotlin.logging) - api(libs.bundles.jackson) + implementation(libs.bundles.micronaut.annotation) + implementation(libs.bundles.micronaut.kotlin) + implementation(libs.kotlin.logging) + implementation(libs.bundles.jackson) - api(project(":oss:airbyte-commons")) - api(project(":oss:airbyte-commons-micronaut")) - api(project(":oss:airbyte-config:config-models")) + implementation(project(":oss:airbyte-commons")) + implementation(project(":oss:airbyte-commons-micronaut")) + implementation(project(":oss:airbyte-config:config-models")) implementation(libs.micronaut.inject) implementation(project(":oss:airbyte-commons-storage")) diff --git a/airbyte-csp-check/src/main/kotlin/Check.kt b/airbyte-csp-check/src/main/kotlin/CspChecker.kt similarity index 55% rename from airbyte-csp-check/src/main/kotlin/Check.kt rename to airbyte-csp-check/src/main/kotlin/CspChecker.kt index 967246ea18e..1ec4a8d736c 100644 --- a/airbyte-csp-check/src/main/kotlin/Check.kt +++ b/airbyte-csp-check/src/main/kotlin/CspChecker.kt @@ -1,4 +1,4 @@ -package io.airbyte.commons.env +package io.airbyte.commons.csp import io.airbyte.commons.annotation.InternalForTesting import io.airbyte.commons.storage.DocumentType @@ -22,7 +22,8 @@ internal const val STORAGE_DOC_CONTENTS = "environment permissions check" * To ensure that all of the proper storage checks are performed, each [DocumentType]'s [StorageClient] needs to be checked. */ @InternalForTesting -internal val storageDocTypes = listOf(DocumentType.STATE, DocumentType.LOGS, DocumentType.WORKLOAD_OUTPUT, DocumentType.ACTIVITY_PAYLOADS) +internal val storageDocTypes = + listOf(DocumentType.STATE, DocumentType.LOGS, DocumentType.WORKLOAD_OUTPUT, DocumentType.ACTIVITY_PAYLOADS, DocumentType.AUDIT_LOGS) /** * Holds the results of the various environmental checks executed. @@ -33,25 +34,32 @@ data class CheckResult( val storage: Storage, ) +/** Aliasing an [Action] to a string */ +typealias Action = String + /** * Represents the storage checks. 
* * @property type the storage type this check represents - * @property results contains the results from the storage checks + * @property buckets contains the results from the storage checks */ data class Storage( val type: StorageType, - val results: Section, + val buckets: List, ) -/** Section represents a grouping of [Permission] objects into a named section. */ -typealias Section = Map - /** - * Permission represents the status of a named check - * @TODO: rename this to something better - **/ -typealias Permission = Map + * Represents a collection of bucket checks. + * + * @property name is the name of the bucket + * @property documentType is the [DocumentType] this bucket represents + * @property results are the result of the [Action] and its resulting [Status] + */ +data class Bucket( + val name: String, + val documentType: DocumentType, + val results: Map, +) /** * An exception that can be thrown by one of the various checks executed in this package. @@ -67,7 +75,7 @@ internal class CheckException( * @property storageFactory injected instance of the [StorageClientFactory] which is used to create the [StorageClient]. */ @Singleton -class Check( +class CspChecker( @Value("\${$STORAGE_TYPE}") private val storageType: StorageType, private val storageFactory: StorageClientFactory, ) { @@ -80,49 +88,55 @@ class Check( private fun checkStorage(): Storage = storageDocTypes .map { storageFactory.create(it) } - .map { checkStoragePermissions(it) } - .reduce { a, b -> a + b } + .map { checkBucket(it) } + .toList() .let { - Storage(type = storageType, results = it) + Storage(type = storageType, buckets = it) } } /** - * Runs a series of storage specific checks, returning a list of check statuses. + * Runs a series of storage specific checks, returning a [Bucket] response. */ -private fun checkStoragePermissions(client: StorageClient): Section { - val permissions = mutableMapOf() - toPermission("write") { client.write(STORAGE_DOC_ID, STORAGE_DOC_CONTENTS) }.let { permissions += it } - toPermission("read") { - client.read(STORAGE_DOC_ID).also { - if (it != STORAGE_DOC_CONTENTS) { - throw CheckException("read contents did not match written contents") - } - } - }.let { permissions += it } - toPermission("list") { client.list(STORAGE_DOC_ID) }.let { permissions += it } - toPermission("delete") { client.delete(STORAGE_DOC_ID) }.let { permissions += it } +private fun checkBucket(client: StorageClient): Bucket { + val results = + mapOf( + toStatus("write") { client.write(STORAGE_DOC_ID, STORAGE_DOC_CONTENTS) }, + toStatus("read") { + client.read(STORAGE_DOC_ID).also { + if (it != STORAGE_DOC_CONTENTS) { + throw CheckException("read contents did not match written contents") + } + } + }, + toStatus("list") { client.list(STORAGE_DOC_ID) }, + toStatus("delete") { client.delete(STORAGE_DOC_ID) }, + ) - return mapOf(client.documentType().toString() to permissions) + return Bucket( + name = client.bucketName, + documentType = client.documentType, + results = results, + ) } /** - * Converts a [block] to a [GreenStatus] or [RedStatus]. + * Converts a [block] to a [Status]. * - * @param name is the name of this check. - * @param block is the code to execute. + * @param name + * @param block is the code to execute * - * If [block] throws an exception, the status of this check will be one of [RedStatus]. - * Otherwise, a [GreenStatus] will be returned. + * If [block] throws an exception, the status of this check will be one of [FailStatus]. + * Otherwise, a [PassStatus] will be returned. 
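The pass/fail renaming above reduces each storage action to a (name, Status) pair built from a runCatching fold. A minimal, self-contained restatement of that pattern, using simplified stand-ins rather than the classes from this change:

```kotlin
// Simplified stand-ins for the Status hierarchy introduced above.
sealed class Status(val result: String, val message: String? = null)

class PassStatus : Status("pass")

class FailStatus(t: Throwable? = null) : Status("fail", t?.message)

// Run a named action and fold its outcome into a (name, Status) pair.
inline fun <R> toStatus(name: String, block: () -> R): Pair<String, Status> =
    runCatching { block() }.fold(
        onSuccess = { name to PassStatus() },
        onFailure = { name to FailStatus(it) },
    )

fun main() {
    val results = mapOf(
        toStatus("write") { "ok" },
        toStatus("read") { error("read contents did not match written contents") },
    )
    // Prints one line per action: "write -> pass", "read -> fail (...)"
    results.forEach { (action, status) ->
        println("$action -> ${status.result}" + (status.message?.let { " ($it)" } ?: ""))
    }
}
```

In the real checker these per-action results are collected into one Bucket per DocumentType, keyed by the bucket name reported by the storage client.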
*/ -private inline fun toPermission( +private inline fun toStatus( name: String, block: () -> R, -): Permission = +): Pair = runCatching { block() } .fold( - onSuccess = { GreenStatus() }, - onFailure = { RedStatus(throwable = it) }, + onSuccess = { PassStatus() }, + onFailure = { FailStatus(throwable = it) }, ).let { - mapOf(name to it) + Pair(name, it) } diff --git a/airbyte-csp-check/src/main/kotlin/Status.kt b/airbyte-csp-check/src/main/kotlin/Status.kt index c8b8be5a62b..b6252e0f466 100644 --- a/airbyte-csp-check/src/main/kotlin/Status.kt +++ b/airbyte-csp-check/src/main/kotlin/Status.kt @@ -1,4 +1,4 @@ -package io.airbyte.commons.env +package io.airbyte.commons.csp import com.fasterxml.jackson.annotation.JsonInclude import io.airbyte.commons.annotation.InternalForTesting @@ -16,25 +16,25 @@ sealed class Status( ) @InternalForTesting -internal const val STATUS_GREEN = "green" - -@InternalForTesting -internal const val STATUS_RED = "red" +internal const val STATUS_PASS = "pass" /** * Represents a successful status check. */ -class GreenStatus : Status(result = STATUS_GREEN) { +class PassStatus : Status(result = STATUS_PASS) { override fun toString(): String = result } +@InternalForTesting +internal const val STATUS_FAIL = "fail" + /** * Represents a failed status check. * * @param throwable the throwable that triggered this failure. */ -class RedStatus( +class FailStatus( throwable: Throwable? = null, -) : Status(result = STATUS_RED, message = throwable?.message) { +) : Status(result = STATUS_FAIL, message = throwable?.message) { override fun toString(): String = "$result ($message)" } diff --git a/airbyte-csp-check/src/test/kotlin/CheckTest.kt b/airbyte-csp-check/src/test/kotlin/CspCheckerTest.kt similarity index 56% rename from airbyte-csp-check/src/test/kotlin/CheckTest.kt rename to airbyte-csp-check/src/test/kotlin/CspCheckerTest.kt index ca52a02f6db..97a3c0b152b 100644 --- a/airbyte-csp-check/src/test/kotlin/CheckTest.kt +++ b/airbyte-csp-check/src/test/kotlin/CspCheckerTest.kt @@ -1,5 +1,6 @@ -package io.airbyte.commons.env +package io.airbyte.commons.csp +import io.airbyte.commons.storage.DocumentType import io.airbyte.commons.storage.StorageClient import io.airbyte.commons.storage.StorageClientFactory import io.airbyte.commons.storage.StorageType @@ -10,7 +11,7 @@ import io.mockk.mockk import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test -class CheckTest { +class CspCheckerTest { @Test fun `verify happy path`() { val storageType = StorageType.LOCAL @@ -21,8 +22,9 @@ class CheckTest { every { write(any(), any()) } just Runs every { read(any()) } returns STORAGE_DOC_CONTENTS every { delete(any()) } returns true - every { documentType() } returnsMany storageDocTypes - every { storageType() } returns storageType + every { documentType } returnsMany storageDocTypes + every { this@mockk.storageType } returns storageType + every { bucketName } returns "test-bucket" every { key(any()) } answers { callOriginal() } } @@ -31,22 +33,21 @@ class CheckTest { every { create(any()) } returns client } - val checkResult = Check(storageType = storageType, storageFactory = factory).check() + val checkResult = CspChecker(storageType = storageType, storageFactory = factory).check() assertEquals(StorageType.LOCAL, checkResult.storage.type) - with(checkResult.storage.results) { - assertEquals(4, size) - // 4 storage clients tested - assertEquals(setOf("STATE", "LOGS", "WORKLOAD_OUTPUT", "ACTIVITY_PAYLOADS"), keys) - // each client should have four tests - // each test 
should be green - forEach { (_, v) -> - assertEquals(setOf("write", "read", "list", "delete"), v.keys) - v.forEach { - assertEquals(STATUS_GREEN, it.value.result) - } - } - } + assertEquals( + setOf(DocumentType.STATE, DocumentType.LOGS, DocumentType.WORKLOAD_OUTPUT, DocumentType.ACTIVITY_PAYLOADS, DocumentType.AUDIT_LOGS), + checkResult.storage.buckets + .map { it.documentType } + .toSet(), + ) + assertEquals( + "test-bucket", + checkResult.storage.buckets + .first() + .name, + ) } @Test @@ -60,8 +61,9 @@ class CheckTest { every { write(any(), any()) } just Runs every { read(any()) } returns STORAGE_DOC_CONTENTS every { delete(any()) } returns true - every { documentType() } returnsMany storageDocTypes - every { storageType() } returns storageType + every { documentType } returnsMany storageDocTypes + every { this@mockk.storageType } returns storageType + every { bucketName } returns "test-bucket" every { key(any()) } answers { callOriginal() } } @@ -70,10 +72,15 @@ class CheckTest { every { create(any()) } returns client } - val checkResult = Check(storageType = storageType, storageFactory = factory).check() + val checkResult = CspChecker(storageType = storageType, storageFactory = factory).check() - with(checkResult.storage.results["STATE"]?.get("list")) { - assertEquals(STATUS_RED, this?.result) + with( + checkResult.storage.buckets + .find { it.documentType == DocumentType.STATE } + ?.results + ?.get("list"), + ) { + assertEquals(STATUS_FAIL, this?.result) assertEquals(msg, this?.message) } } @@ -89,8 +96,9 @@ class CheckTest { every { write(any(), any()) } throws RuntimeException(msg) every { read(any()) } returns STORAGE_DOC_CONTENTS every { delete(any()) } returns true - every { documentType() } returnsMany storageDocTypes - every { storageType() } returns storageType + every { documentType } returnsMany storageDocTypes + every { this@mockk.storageType } returns storageType + every { bucketName } returns "test-bucket" every { key(any()) } answers { callOriginal() } } @@ -99,10 +107,15 @@ class CheckTest { every { create(any()) } returns client } - val checkResult = Check(storageType = storageType, storageFactory = factory).check() + val checkResult = CspChecker(storageType = storageType, storageFactory = factory).check() - with(checkResult.storage.results["STATE"]?.get("write")) { - assertEquals(STATUS_RED, this?.result) + with( + checkResult.storage.buckets + .find { it.documentType == DocumentType.STATE } + ?.results + ?.get("write"), + ) { + assertEquals(STATUS_FAIL, this?.result) assertEquals(msg, this?.message) } } @@ -118,8 +131,9 @@ class CheckTest { every { write(any(), any()) } just Runs every { read(any()) } throws RuntimeException(msg) every { delete(any()) } returns true - every { documentType() } returnsMany storageDocTypes - every { storageType() } returns storageType + every { documentType } returnsMany storageDocTypes + every { this@mockk.storageType } returns storageType + every { bucketName } returns "test-bucket" every { key(any()) } answers { callOriginal() } } @@ -128,10 +142,15 @@ class CheckTest { every { create(any()) } returns client } - val checkResult = Check(storageType = storageType, storageFactory = factory).check() + val checkResult = CspChecker(storageType = storageType, storageFactory = factory).check() - with(checkResult.storage.results["STATE"]?.get("read")) { - assertEquals(STATUS_RED, this?.result) + with( + checkResult.storage.buckets + .find { it.documentType == DocumentType.STATE } + ?.results + ?.get("read"), + ) { + 
assertEquals(STATUS_FAIL, this?.result) assertEquals(msg, this?.message) } } @@ -147,8 +166,9 @@ class CheckTest { every { write(any(), any()) } just Runs every { read(any()) } returns STORAGE_DOC_CONTENTS every { delete(any()) } throws RuntimeException(msg) - every { documentType() } returnsMany storageDocTypes - every { storageType() } returns storageType + every { documentType } returnsMany storageDocTypes + every { this@mockk.storageType } returns storageType + every { bucketName } returns "test-bucket" every { key(any()) } answers { callOriginal() } } @@ -157,10 +177,15 @@ class CheckTest { every { create(any()) } returns client } - val checkResult = Check(storageType = storageType, storageFactory = factory).check() + val checkResult = CspChecker(storageType = storageType, storageFactory = factory).check() - with(checkResult.storage.results["STATE"]?.get("delete")) { - assertEquals(STATUS_RED, this?.result) + with( + checkResult.storage.buckets + .find { it.documentType == DocumentType.STATE } + ?.results + ?.get("delete"), + ) { + assertEquals(STATUS_FAIL, this?.result) assertEquals(msg, this?.message) } } diff --git a/airbyte-data/build.gradle.kts b/airbyte-data/build.gradle.kts index dea7a48967f..f4c91e95431 100644 --- a/airbyte-data/build.gradle.kts +++ b/airbyte-data/build.gradle.kts @@ -5,10 +5,8 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - api(libs.bundles.micronaut.annotation) + api(libs.micronaut.cache.caffeine) ksp(platform(libs.micronaut.platform)) ksp(libs.bundles.micronaut.annotation.processor) @@ -19,6 +17,7 @@ dependencies { implementation(libs.bundles.apache) implementation(libs.bundles.jackson) implementation(libs.bundles.micronaut.data.jdbc) + implementation(libs.bundles.datadog) implementation(libs.guava) implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-commons")) @@ -37,9 +36,6 @@ dependencies { implementation(libs.bundles.keycloak.client) implementation(libs.micronaut.security.jwt) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) - testImplementation(libs.assertj.core) testImplementation(libs.bundles.micronaut.test) testImplementation(libs.postgresql) diff --git a/airbyte-data/src/main/java/io/airbyte/data/repositories/domain/UserInvitation.java b/airbyte-data/src/main/java/io/airbyte/data/repositories/domain/UserInvitation.java deleted file mode 100644 index bbdb905b359..00000000000 --- a/airbyte-data/src/main/java/io/airbyte/data/repositories/domain/UserInvitation.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.data.repositories.domain; - -import io.airbyte.db.instance.configs.jooq.generated.enums.InvitationStatus; -import io.airbyte.db.instance.configs.jooq.generated.enums.PermissionType; -import io.airbyte.db.instance.configs.jooq.generated.enums.ScopeType; -import io.micronaut.data.annotation.AutoPopulated; -import io.micronaut.data.annotation.DateCreated; -import io.micronaut.data.annotation.DateUpdated; -import io.micronaut.data.annotation.Id; -import io.micronaut.data.annotation.MappedEntity; -import io.micronaut.data.annotation.TypeDef; -import io.micronaut.data.model.DataType; -import java.time.OffsetDateTime; -import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.EqualsAndHashCode; -import lombok.Getter; -import lombok.Setter; - -@Builder(toBuilder = true) -@AllArgsConstructor -@Getter -@Setter -@EqualsAndHashCode -@MappedEntity("user_invitation") -public class UserInvitation { - - @Id - @AutoPopulated - private UUID id; - - private String inviteCode; - - private UUID inviterUserId; - - private String invitedEmail; - - private UUID scopeId; - - @TypeDef(type = DataType.OBJECT) - private ScopeType scopeType; - - @TypeDef(type = DataType.OBJECT) - private PermissionType permissionType; - - @TypeDef(type = DataType.OBJECT) - private InvitationStatus status; - - @DateCreated - private OffsetDateTime createdAt; - - @DateUpdated - private OffsetDateTime updatedAt; - -} diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/ConnectionService.java b/airbyte-data/src/main/java/io/airbyte/data/services/ConnectionService.java index 13f0220e996..727ab043513 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/ConnectionService.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/ConnectionService.java @@ -59,8 +59,10 @@ List listConnectionsByActorDefinitionIdAndType(UUID actorDefinitio Set listEarlySyncJobs(final int freeUsageInterval, final int jobsFetchRange) throws IOException; - void disableConnectionsById(final List connectionIds) throws IOException; + Set disableConnectionsById(final List connectionIds) throws IOException; List listConnectionIdsForWorkspace(UUID workspaceId) throws IOException; + List listConnectionIdsForOrganization(UUID organizationId) throws IOException; + } diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/OAuthService.java b/airbyte-data/src/main/java/io/airbyte/data/services/OAuthService.java index 96814f3742f..81172f936bc 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/OAuthService.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/OAuthService.java @@ -17,8 +17,6 @@ */ public interface OAuthService { - Optional getSourceOAuthParamByDefinitionIdOptional(UUID workspaceId, UUID sourceDefinitionId) throws IOException; - void writeSourceOAuthParam(SourceOAuthParameter sourceOAuthParameter) throws IOException; SourceOAuthParameter getSourceOAuthParameterWithSecrets(UUID workspaceId, UUID sourceDefinitionId) @@ -27,15 +25,17 @@ SourceOAuthParameter getSourceOAuthParameterWithSecrets(UUID workspaceId, UUID s Optional getSourceOAuthParameterOptional(UUID workspaceId, UUID sourceDefinitionId) throws IOException; + Optional getSourceOAuthParamByDefinitionIdOptional(UUID workspaceId, UUID sourceDefinitionId) throws IOException; + + void writeDestinationOAuthParam(DestinationOAuthParameter destinationOAuthParameter) throws IOException; + DestinationOAuthParameter getDestinationOAuthParameterWithSecrets(UUID workspaceId, UUID 
destinationDefinitionId) throws IOException, ConfigNotFoundException; - Optional getDestinationOAuthParameterOptional(UUID workspaceId, UUID sourceDefinitionId) + Optional getDestinationOAuthParameterOptional(UUID workspaceId, UUID destinationDefinitionId) throws IOException; Optional getDestinationOAuthParamByDefinitionIdOptional(UUID workspaceId, UUID destinationDefinitionId) throws IOException; - void writeDestinationOAuthParam(DestinationOAuthParameter destinationOAuthParameter) throws IOException; - } diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/CatalogServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/CatalogServiceJooqImpl.java index 08eb33ba43a..8b173777a94 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/CatalogServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/CatalogServiceJooqImpl.java @@ -11,6 +11,7 @@ import com.google.common.base.Charsets; import com.google.common.hash.HashFunction; import com.google.common.hash.Hashing; +import datadog.trace.api.Trace; import io.airbyte.commons.json.Jsons; import io.airbyte.config.ActorCatalog; import io.airbyte.config.ActorCatalogFetchEvent; @@ -200,6 +201,7 @@ public UUID writeActorCatalogFetchEvent(AirbyteCatalog catalog, * @throws IOException - error while interacting with db */ @Override + @Trace public Map getMostRecentActorCatalogFetchEventForSources( List sourceIds) throws IOException { diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectionServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectionServiceJooqImpl.java index 63907598f3d..1965468eaba 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectionServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/ConnectionServiceJooqImpl.java @@ -12,12 +12,14 @@ import static io.airbyte.db.instance.configs.jooq.generated.Tables.NOTIFICATION_CONFIGURATION; import static io.airbyte.db.instance.configs.jooq.generated.Tables.SCHEMA_MANAGEMENT; import static io.airbyte.db.instance.configs.jooq.generated.Tables.STATE; +import static io.airbyte.db.instance.configs.jooq.generated.Tables.WORKSPACE; import static org.jooq.impl.DSL.asterisk; import static org.jooq.impl.DSL.groupConcat; import static org.jooq.impl.DSL.noCondition; import static org.jooq.impl.DSL.select; import com.google.common.annotations.VisibleForTesting; +import datadog.trace.api.Trace; import io.airbyte.commons.enums.Enums; import io.airbyte.commons.json.Jsons; import io.airbyte.config.ConfigSchema; @@ -107,6 +109,7 @@ public void deleteStandardSync(final UUID syncId) throws IOException { * @throws IOException if there is an issue while interacting with db. */ @Override + @Trace public StandardSync getStandardSync(final UUID connectionId) throws JsonValidationException, IOException, ConfigNotFoundException { final List> result = listStandardSyncWithMetadata(Optional.of(connectionId)); @@ -203,6 +206,7 @@ public List listWorkspaceStandardSyncs(final UUID workspaceId, fin * @throws IOException if there is an issue while interacting with db. 
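Several jOOQ read paths in this change also gain Datadog's @Trace annotation (imported as datadog.trace.api.Trace in the surrounding hunks), so each call is recorded as its own APM span when the Datadog agent is attached. A small sketch of that usage, assuming the dd-trace-api dependency is on the classpath; the class and method here are invented for illustration:

```kotlin
import datadog.trace.api.Trace

// Hypothetical service used only to illustrate the annotation. With the Datadog Java
// agent attached, each call to an @Trace-annotated method is recorded as a span
// named after the method.
class ExampleReadService {
    @Trace
    fun loadById(id: String): String = "record-$id"
}

fun main() {
    println(ExampleReadService().loadById("42"))
}
```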
*/ @Override + @Trace public List listWorkspaceStandardSyncs(final StandardSyncQuery standardSyncQuery) throws IOException { final Result connectionAndOperationIdsResult = database.query(ctx -> ctx @@ -455,18 +459,18 @@ public Set listEarlySyncJobs(final int freeUsageInterval, final int jobsFe * Disable a list of connections by setting their status to inactive. * * @param connectionIds list of connection ids to disable + * @return set of connection ids that were updated * @throws IOException if there is an issue while interacting with db. */ @Override - public void disableConnectionsById(final List connectionIds) throws IOException { - database.transaction(ctx -> { - ctx.update(CONNECTION) - .set(CONNECTION.UPDATED_AT, OffsetDateTime.now()) - .set(CONNECTION.STATUS, StatusType.inactive) - .where(CONNECTION.ID.in(connectionIds)) - .execute(); - return null; - }); + public Set disableConnectionsById(final List connectionIds) throws IOException { + return database.transaction(ctx -> ctx.update(CONNECTION) + .set(CONNECTION.UPDATED_AT, OffsetDateTime.now()) + .set(CONNECTION.STATUS, StatusType.inactive) + .where(CONNECTION.ID.in(connectionIds) + .and(CONNECTION.STATUS.eq(StatusType.active))) + .returning(CONNECTION.ID) + .fetchSet(CONNECTION.ID)); } @Override @@ -478,6 +482,17 @@ public List listConnectionIdsForWorkspace(final UUID workspaceId) throws I .fetchInto(UUID.class)); } + @Override + public List listConnectionIdsForOrganization(final UUID organizationId) throws IOException { + return database.query(ctx -> ctx.select(CONNECTION.ID) + .from(CONNECTION) + .join(ACTOR).on(ACTOR.ID.eq(CONNECTION.SOURCE_ID)) + .join(WORKSPACE).on(WORKSPACE.ID.eq(ACTOR.WORKSPACE_ID)) + .where(WORKSPACE.ORGANIZATION_ID.eq(organizationId)) + .and(CONNECTION.STATUS.ne(StatusType.deprecated)) + .fetchInto(UUID.class)); + } + private Set getEarlySyncJobsFromResult(final Result result) { // Transform the result to a list of early sync job ids // the rest of the fields are not used, we aim to keep the set small diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java index f23c5cb2e9b..4fc4b90f2ef 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DbConverter.java @@ -204,9 +204,7 @@ public static Organization buildOrganization(final Record record) { .withOrganizationId(record.get(ORGANIZATION.ID)) .withName(record.get(ORGANIZATION.NAME)) .withUserId(record.get(ORGANIZATION.USER_ID)) - .withEmail(record.get(ORGANIZATION.EMAIL)) - .withPba(record.get(ORGANIZATION.PBA)) - .withOrgLevelBilling(record.get(ORGANIZATION.ORG_LEVEL_BILLING)); + .withEmail(record.get(ORGANIZATION.EMAIL)); } /** diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java index 30d093b2c2d..4cb421427b9 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/DestinationServiceJooqImpl.java @@ -48,6 +48,7 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.time.OffsetDateTime; import java.util.ArrayList; import java.util.Collections; @@ -59,7 +60,6 @@ import 
java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.ArrayUtils; import org.jooq.Condition; import org.jooq.DSLContext; @@ -72,11 +72,14 @@ import org.jooq.Result; import org.jooq.SelectJoinStep; import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -@Slf4j @Singleton public class DestinationServiceJooqImpl implements DestinationService { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final ExceptionWrappingDatabase database; private final FeatureFlagClient featureFlagClient; private final SecretsRepositoryReader secretsRepositoryReader; diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/OrganizationServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/OrganizationServiceJooqImpl.java index a5e5c0036ff..e56e044637d 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/OrganizationServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/OrganizationServiceJooqImpl.java @@ -83,8 +83,6 @@ public void writeOrganization(final Organization organization) throws IOExceptio .set(ORGANIZATION.EMAIL, organization.getEmail()) .set(ORGANIZATION.USER_ID, organization.getUserId()) .set(ORGANIZATION.UPDATED_AT, timestamp) - .set(ORGANIZATION.PBA, organization.getPba()) - .set(ORGANIZATION.ORG_LEVEL_BILLING, organization.getOrgLevelBilling()) .where(ORGANIZATION.ID.eq(organization.getOrganizationId())) .execute(); } else { @@ -95,8 +93,6 @@ public void writeOrganization(final Organization organization) throws IOExceptio .set(ORGANIZATION.USER_ID, organization.getUserId()) .set(WORKSPACE.CREATED_AT, timestamp) .set(WORKSPACE.UPDATED_AT, timestamp) - .set(ORGANIZATION.PBA, organization.getPba()) - .set(ORGANIZATION.ORG_LEVEL_BILLING, organization.getOrgLevelBilling()) .execute(); } return null; diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java index bfd2e58d746..2c6d2ffe7a9 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/SourceServiceJooqImpl.java @@ -52,6 +52,7 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.time.OffsetDateTime; import java.util.ArrayList; import java.util.Collections; @@ -63,7 +64,6 @@ import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.ArrayUtils; import org.jooq.Condition; import org.jooq.DSLContext; @@ -76,11 +76,14 @@ import org.jooq.Result; import org.jooq.SelectJoinStep; import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -@Slf4j @Singleton public class SourceServiceJooqImpl implements SourceService { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final ExceptionWrappingDatabase database; private final FeatureFlagClient featureFlagClient; private final SecretsRepositoryReader secretRepositoryReader; diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/WorkspaceServiceJooqImpl.java 
b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/WorkspaceServiceJooqImpl.java index 8875c4a92a0..571d576be7d 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/WorkspaceServiceJooqImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/jooq/WorkspaceServiceJooqImpl.java @@ -50,6 +50,7 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.time.OffsetDateTime; import java.util.Collections; import java.util.List; @@ -57,7 +58,6 @@ import java.util.UUID; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; import org.jooq.Condition; import org.jooq.DSLContext; import org.jooq.JSONB; @@ -67,11 +67,14 @@ import org.jooq.Result; import org.jooq.SelectJoinStep; import org.jooq.exception.DataAccessException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; -@Slf4j @Singleton public class WorkspaceServiceJooqImpl implements WorkspaceService { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final ExceptionWrappingDatabase database; private final FeatureFlagClient featureFlagClient; private final SecretsRepositoryReader secretsRepositoryReader; diff --git a/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java b/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java index c914471565f..97cb39b2dfd 100644 --- a/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java +++ b/airbyte-data/src/main/java/io/airbyte/data/services/impls/keycloak/ApplicationServiceKeycloakImpl.java @@ -16,6 +16,7 @@ import jakarta.inject.Singleton; import jakarta.ws.rs.BadRequestException; import jakarta.ws.rs.core.Response; +import java.lang.invoke.MethodHandles; import java.time.Duration; import java.time.Instant; import java.time.OffsetDateTime; @@ -27,13 +28,14 @@ import java.util.Map; import java.util.Optional; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.KeycloakBuilder; import org.keycloak.admin.client.resource.ClientsResource; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.admin.client.resource.UsersResource; import org.keycloak.representations.idm.ClientRepresentation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Application Service for Keycloak. @@ -41,11 +43,12 @@ * An Application for a user or non-user entity i.e. an organization. */ @Singleton -@Slf4j @RequiresAuthMode(AuthMode.OIDC) @SuppressWarnings("PMD.ExceptionAsFlowControl") public class ApplicationServiceKeycloakImpl implements ApplicationService { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + // This number should be kept low or this code will start to do a lot of work. 
public static final int MAX_CREDENTIALS = 2; public static final String USER_ID = "user_id"; diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/config/OrganizationCustomerAttributesServiceConfig.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/config/OrganizationCustomerAttributesServiceConfig.kt new file mode 100644 index 00000000000..a9310bbdfb4 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/config/OrganizationCustomerAttributesServiceConfig.kt @@ -0,0 +1,43 @@ +package io.airbyte.data.config + +import com.google.auth.oauth2.GoogleCredentials +import com.google.cloud.storage.Storage +import com.google.cloud.storage.StorageOptions +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.context.annotation.Factory +import jakarta.inject.Named +import java.nio.file.Files +import java.nio.file.Paths + +private val logger = KotlinLogging.logger {} + +@Factory +@Named("customerTierStorage") +class OrganizationCustomerAttributesServiceConfig { + fun provideStorage( + gcsApplicationCredentials: String?, + gcsProjectId: String?, + ): Storage? { + if (gcsApplicationCredentials.isNullOrBlank()) { + logger.info { + "Cannot initialize storage for OrganizationCustomerAttributesService;" + + "gcsProjectId=$gcsProjectId gcsApplicationCredentials=$gcsApplicationCredentials" + } + + return null + } + val credentials = GoogleCredentials.fromStream(Files.newInputStream(Paths.get(gcsApplicationCredentials))) + val storage = + StorageOptions.newBuilder() + .setCredentials(credentials) + .setProjectId(gcsProjectId) + .build() + .service + + logger.info { + "Initialized storage for OrganizationCustomerAttributesService for project $gcsProjectId" + } + + return storage + } +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdater.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdater.kt index 8738d64caf6..dc9b7e7afea 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdater.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdater.kt @@ -299,6 +299,23 @@ class ActorDefinitionVersionUpdater( scopedConfigurationService.insertScopedConfigurations(scopedConfigurationsToCreate) } + fun migrateReleaseCandidatePins( + actorDefinitionId: UUID, + origins: List, + newOrigin: String, + newReleaseCandidateVersionId: UUID, + ) { + scopedConfigurationService.updateScopedConfigurationsOriginAndValuesForOriginInList( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigOriginType.CONNECTOR_ROLLOUT, + origins, + newOrigin, + newReleaseCandidateVersionId.toString(), + ) + } + @VisibleForTesting fun removeReleaseCandidatePinsForVersion( actorDefinitionId: UUID, diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/JobsRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/JobsRepository.kt index 46a32d17b4e..4f8d5b2d888 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/JobsRepository.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/JobsRepository.kt @@ -1,9 +1,72 @@ package io.airbyte.data.repositories import io.airbyte.data.repositories.entities.Job +import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus +import io.micronaut.data.annotation.Query import io.micronaut.data.jdbc.annotation.JdbcRepository import io.micronaut.data.model.query.builder.sql.Dialect import io.micronaut.data.repository.PageableRepository 
@JdbcRepository(dialect = Dialect.POSTGRES, dataSource = "config") -interface JobsRepository : PageableRepository +interface JobsRepository : PageableRepository { + /** + * Counts the number of failed jobs since the last successful job for a given scope. + * If there are no successful jobs, it counts all failed jobs for that scope. + * + * @param scope The scope associated with the connection (UUID as String). + * @return The count of failed jobs since the last successful job. + */ + @Query( + """ + SELECT COUNT(*) + FROM jobs + WHERE scope = :scope + AND status = 'failed' + AND (created_at > ( + SELECT MAX(created_at) + FROM jobs + WHERE scope = :scope + AND status = 'succeeded' + ) OR NOT EXISTS ( + SELECT 1 + FROM jobs + WHERE scope = :scope + AND status = 'succeeded' + )) + """, + ) + fun countFailedJobsSinceLastSuccessForScope(scope: String): Int + + @Query( + """ + SELECT * + FROM jobs + WHERE scope = :scope + AND status = 'succeeded' + ORDER BY created_at DESC + LIMIT 1 + """, + ) + fun lastSuccessfulJobForScope(scope: String): Job? + + @Query( + """ + SELECT * + FROM jobs + WHERE scope = :scope + AND created_at < ( + SELECT created_at + FROM jobs + WHERE id = :jobId + ) + AND status = :status + ORDER BY created_at DESC + LIMIT 1 + """, + ) + fun getPriorJobWithStatusForScopeAndJobId( + scope: String, + jobId: Long, + status: JobStatus, + ): Job? +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ScopedConfigurationRepository.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ScopedConfigurationRepository.kt index 55e1497a2bb..4dd89aeb3c4 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ScopedConfigurationRepository.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/ScopedConfigurationRepository.kt @@ -13,32 +13,32 @@ import java.util.UUID interface ScopedConfigurationRepository : PageableRepository { fun getByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeId( key: String, - resourceType: ConfigResourceType, - resourceId: UUID, + resourceType: ConfigResourceType?, + resourceId: UUID?, scopeType: ConfigScopeType, scopeId: UUID, ): ScopedConfiguration? 
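The JobsRepository queries above are surfaced through the JobService additions later in this diff (lastSuccessfulJobForScope, countFailedJobsSinceLastSuccessForScope, getPriorJobWithStatusForScopeAndJobId). A hedged sketch of how a caller might combine them; the helper function, the threshold, and the use of the connection id as the scope string are assumptions for illustration, only the service methods come from this change:

```kotlin
import io.airbyte.config.Job
import io.airbyte.data.services.JobService

// Invented threshold for this sketch; the real auto-disable rules live elsewhere.
private const val MAX_FAILURES_SINCE_LAST_SUCCESS = 20

// Hypothetical helper combining the new JobService calls. "scope" is assumed to be the
// connection id rendered as a string, matching the scope column used in the queries above.
fun shouldAutoDisable(jobService: JobService, scope: String): Boolean {
    val lastSuccess: Job? = jobService.lastSuccessfulJobForScope(scope)
    val failuresSinceLastSuccess = jobService.countFailedJobsSinceLastSuccessForScope(scope)
    // With no successful job on record, the count covers every failed job for the scope.
    return (lastSuccess == null && failuresSinceLastSuccess > 0) ||
        failuresSinceLastSuccess >= MAX_FAILURES_SINCE_LAST_SUCCESS
}
```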
fun findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndScopeIdInList( key: String, - resourceType: ConfigResourceType, - resourceId: UUID, + resourceType: ConfigResourceType?, + resourceId: UUID?, scopeType: ConfigScopeType, scopeId: List, ): List fun findByKeyAndResourceTypeAndResourceIdAndOriginTypeAndOriginInList( key: String, - resourceType: ConfigResourceType, - resourceId: UUID, + resourceType: ConfigResourceType?, + resourceId: UUID?, originType: ConfigOriginType, origins: List, ): List fun findByKeyAndResourceTypeAndResourceIdAndScopeTypeAndOriginTypeAndValueInList( key: String, - resourceType: ConfigResourceType, - resourceId: UUID, + resourceType: ConfigResourceType?, + resourceId: UUID?, scopeType: ConfigScopeType, originType: ConfigOriginType, values: List, @@ -46,7 +46,13 @@ interface ScopedConfigurationRepository : PageableRepository + + fun findByKeyAndScopeTypeAndScopeId( + key: String, configScopeType: ConfigScopeType, scopeId: UUID, ): List @@ -54,4 +60,14 @@ interface ScopedConfigurationRepository : PageableRepository fun deleteByIdInList(ids: List) + + fun updateByKeyAndResourceTypeAndResourceIdAndOriginTypeAndOriginIn( + key: String, + resourceType: ConfigResourceType, + resourceId: UUID, + originType: ConfigOriginType, + origins: List, + origin: String, + value: String, + ) } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/Organization.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/Organization.kt index 5f38e12290c..ab583c7295b 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/Organization.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/Organization.kt @@ -14,8 +14,6 @@ open class Organization( var name: String, var userId: UUID? = null, var email: String, - var pba: Boolean = false, - var orgLevelBilling: Boolean = false, var tombstone: Boolean = false, @DateCreated var createdAt: java.time.OffsetDateTime? = null, diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/OrganizationPaymentConfig.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/OrganizationPaymentConfig.kt index fc392a40285..28d6b37abd3 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/OrganizationPaymentConfig.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/OrganizationPaymentConfig.kt @@ -1,6 +1,7 @@ package io.airbyte.data.repositories.entities import io.airbyte.db.instance.configs.jooq.generated.enums.PaymentStatus +import io.airbyte.db.instance.configs.jooq.generated.enums.SubscriptionStatus import io.airbyte.db.instance.configs.jooq.generated.enums.UsageCategoryOverride import io.micronaut.data.annotation.DateCreated import io.micronaut.data.annotation.DateUpdated @@ -17,6 +18,8 @@ open class OrganizationPaymentConfig( var paymentProviderId: String? = null, @field:TypeDef(type = DataType.OBJECT) var paymentStatus: PaymentStatus = PaymentStatus.uninitialized, + @field:TypeDef(type = DataType.OBJECT) + var subscriptionStatus: SubscriptionStatus = SubscriptionStatus.pre_subscription, var gracePeriodEndAt: java.time.OffsetDateTime? = null, @field:TypeDef(type = DataType.OBJECT) var usageCategoryOverride: UsageCategoryOverride? 
= null, diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/ScopedConfiguration.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/ScopedConfiguration.kt index abb0214998a..d5a52931ac7 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/ScopedConfiguration.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/repositories/entities/ScopedConfiguration.kt @@ -24,8 +24,8 @@ data class ScopedConfiguration( var scopeType: ConfigScopeType, var scopeId: UUID, @field:TypeDef(type = DataType.OBJECT) - var resourceType: ConfigResourceType, - var resourceId: UUID, + var resourceType: ConfigResourceType? = null, + var resourceId: UUID? = null, @field:TypeDef(type = DataType.OBJECT) var originType: ConfigOriginType, var origin: String, diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/JobService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/JobService.kt index 134e73fb27c..fbdeecfbad0 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/JobService.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/JobService.kt @@ -6,6 +6,9 @@ import io.airbyte.config.JobStatus import java.time.OffsetDateTime interface JobService { + /** + * List jobs with the given filters. + */ fun listJobs( configTypes: Set, scope: String?, @@ -19,4 +22,23 @@ interface JobService { orderByField: String? = "createdAt", orderByMethod: String? = "desc", ): List + + /** + * Get the last successful job for a given scope. + */ + fun lastSuccessfulJobForScope(scope: String): Job? + + /** + * Counts the number of failed jobs since the last successful job for a given scope. + */ + fun countFailedJobsSinceLastSuccessForScope(scope: String): Int + + /** + * Get the job with the given status that was run before the job with the given ID. + */ + fun getPriorJobWithStatusForScopeAndJobId( + scope: String, + jobId: Long, + status: JobStatus, + ): Job? } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/OrganizationCustomerAttributesService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/OrganizationCustomerAttributesService.kt new file mode 100644 index 00000000000..142892def19 --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/OrganizationCustomerAttributesService.kt @@ -0,0 +1,19 @@ +package io.airbyte.data.services + +import java.util.UUID + +enum class CustomerTier { + TIER_0, + TIER_1, + TIER_2, +} + +/** + * A service that reads organization tier information from GCS. + */ +interface OrganizationCustomerAttributesService { + /** + * Get a map of organization ID to customer tier. + */ + fun getOrganizationTiers(): Map +} diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt index f6a38710cb0..37f8788e9b7 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/PermissionService.kt @@ -7,6 +7,11 @@ import java.util.UUID * A service that manages permissions. */ interface PermissionService { + /** + * Get a permission by its unique id. 
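For context on the customer-attributes service introduced just above: the GCS-backed implementation further down in this diff reads newline-delimited JSON in which the organization id and tier sit under an "_airbyte_data" field. A standalone sketch of that record shape and of the tier parsing it applies; the UUID and tier value are invented for the example:

```kotlin
import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper
import io.airbyte.data.services.CustomerTier
import java.util.UUID

// One record of the ".jsonl" files the implementation scans, parsed the same way
// parseJsonLine does below: id and tier are nested under "_airbyte_data", and the
// tier string ("Tier 1") maps onto the CustomerTier enum (TIER_1).
fun main() {
    val line =
        """{"_airbyte_data":{"organization_id":"f3a9e1de-0000-0000-0000-000000000001","customer_tier":"Tier 1"}}"""
    val data = jacksonObjectMapper().readTree(line)["_airbyte_data"]
    val organizationId = UUID.fromString(data["organization_id"].asText())
    val tier = CustomerTier.valueOf(data["customer_tier"].asText().replace(" ", "_").uppercase())
    println("$organizationId -> $tier") // prints the invented id mapped to TIER_1
}
```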
+ */ + fun getPermission(permissionId: UUID): Permission + /** * Get all permissions */ diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/ScopedConfigurationService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/ScopedConfigurationService.kt index 5d0090f467b..ae8d9fcd273 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/ScopedConfigurationService.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/ScopedConfigurationService.kt @@ -65,6 +65,17 @@ interface ScopedConfigurationService { resourceType: ConfigResourceType, ): List + /** + * Get a scoped configuration by key and scope map. + * + * This will resolve the configuration by evaluating the scopes in the priority order defined by the given key. + * Scopes included in the map must be defined as a supported scope in the key definition (see ScopedConfigurationKey). + */ + fun getScopedConfigurations( + configKey: ScopedConfigurationKey, + scopes: Map, + ): List + /** * Get scoped configurations for multiple key, resource and scope map (in batch). * @@ -145,4 +156,17 @@ interface ScopedConfigurationService { * Delete multiple configurations by their IDs. */ fun deleteScopedConfigurations(configIds: List) + + /** + * Update the value for scoped configurations with given origin values for an origin type. + */ + fun updateScopedConfigurationsOriginAndValuesForOriginInList( + key: String, + resourceType: ConfigResourceType, + resourceId: UUID, + originType: ConfigOriginType, + origins: List, + newOrigin: String, + newValue: String, + ) } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/JobServiceDataImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/JobServiceDataImpl.kt index 737dfdd1b7c..04093eb515f 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/JobServiceDataImpl.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/JobServiceDataImpl.kt @@ -6,6 +6,8 @@ package io.airbyte.data.services.impls.data import io.airbyte.config.Job import io.airbyte.config.JobConfig +import io.airbyte.config.JobStatus +import io.airbyte.data.repositories.JobsRepository import io.airbyte.data.repositories.JobsWithAttemptsRepository import io.airbyte.data.repositories.Specifications import io.airbyte.data.services.JobService @@ -22,6 +24,7 @@ const val DEFAULT_SORT_FIELD = "createdAt" @Singleton class JobServiceDataImpl( private val jobsWithAttemptsRepository: JobsWithAttemptsRepository, + private val jobsRepository: JobsRepository, ) : JobService { override fun listJobs( configTypes: Set, @@ -37,19 +40,33 @@ class JobServiceDataImpl( orderByMethod: String?, ): List { val pageable = buildPageable(limit, offset, orderByField, orderByMethod) - return jobsWithAttemptsRepository.findAll( - Specifications.jobWithAssociatedAttempts( - configTypes = configTypes.map { it.toEntity() }.toSet(), - scope = scope, - statuses = statuses.map { it.toEntity() }.toSet(), - createdAtStart = createdAtStart, - createdAtEnd = createdAtEnd, - updatedAtStart = updatedAtStart, - updatedAtEnd = updatedAtEnd, - ), - pageable, - ) - .toList().map { it.toConfigModel() }.toList() + return jobsWithAttemptsRepository + .findAll( + Specifications.jobWithAssociatedAttempts( + configTypes = configTypes.map { it.toEntity() }.toSet(), + scope = scope, + statuses = statuses.map { it.toEntity() }.toSet(), + createdAtStart = createdAtStart, + createdAtEnd = createdAtEnd, + updatedAtStart = updatedAtStart, + updatedAtEnd = updatedAtEnd, + ), + 
pageable, + ).toList() + .map { it.toConfigModel() } + .toList() + } + + override fun lastSuccessfulJobForScope(scope: String): Job? = jobsRepository.lastSuccessfulJobForScope(scope)?.toConfigModel() + + override fun countFailedJobsSinceLastSuccessForScope(scope: String): Int = jobsRepository.countFailedJobsSinceLastSuccessForScope(scope) + + override fun getPriorJobWithStatusForScopeAndJobId( + scope: String, + jobId: Long, + status: JobStatus, + ): Job? { + return jobsRepository.getPriorJobWithStatusForScopeAndJobId(scope, jobId, status.toEntity())?.toConfigModel() } private fun buildPageable( @@ -67,10 +84,11 @@ class JobServiceDataImpl( } // withoutTotal is used to get a pageable that won't make a count query - return Pageable.from( - offset / limit, - limit, - Sort.of(order), - ).withoutTotal() + return Pageable + .from( + offset / limit, + limit, + Sort.of(order), + ).withoutTotal() } } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/OrganizationCustomerAttributesServiceDataImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/OrganizationCustomerAttributesServiceDataImpl.kt new file mode 100644 index 00000000000..e17d8914ebb --- /dev/null +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/OrganizationCustomerAttributesServiceDataImpl.kt @@ -0,0 +1,103 @@ +package io.airbyte.data.services.impls.data + +import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper +import com.google.cloud.storage.Blob +import com.google.cloud.storage.Storage +import com.google.common.annotations.VisibleForTesting +import io.airbyte.data.config.OrganizationCustomerAttributesServiceConfig +import io.airbyte.data.services.CustomerTier +import io.airbyte.data.services.OrganizationCustomerAttributesService +import io.github.oshai.kotlinlogging.KotlinLogging +import io.micronaut.cache.annotation.Cacheable +import io.micronaut.context.annotation.Value +import jakarta.inject.Named +import jakarta.inject.Singleton +import java.util.UUID + +private val logger = KotlinLogging.logger {} +private const val NO_CUSTOMER_TIER = "No Customer Tier" +private const val NO_ORGANIZATION_ID = "No Organization Id" + +@Singleton +open class OrganizationCustomerAttributesServiceDataImpl( + @Value("\${airbyte.connector-rollout.gcs.bucket-name}") private val gcsBucketName: String?, + @Value("\${airbyte.connector-rollout.gcs.application-credentials}") private val gcsApplicationCredentials: String?, + @Value("\${airbyte.connector-rollout.gcs.project-id}") private val gcsProjectId: String?, + @Named("customerTierStorage") private val organizationCustomerAttributeServiceConfig: OrganizationCustomerAttributesServiceConfig, +) : OrganizationCustomerAttributesService { + @Cacheable("organization-customer-attributes") + override fun getOrganizationTiers(): Map { + val storage = organizationCustomerAttributeServiceConfig.provideStorage(gcsApplicationCredentials, gcsProjectId) + if (storage == null) { + logger.warn { "OrganizationCustomerAttributesServiceDataImpl getOrganizationTiers: GCS credentials are missing or invalid." } + return emptyMap() + } + + val mostRecentFile = getMostRecentFile(storage) + return if (mostRecentFile == null) { + logger.warn { "OrganizationCustomerAttributesServiceDataImpl getOrganizationTiers: No files found in bucket $gcsBucketName." 
} + emptyMap() + } else { + logger.info { "OrganizationCustomerAttributesServiceDataImpl getOrganizationTiers: most recent file: ${mostRecentFile.name}" } + readFileContent(mostRecentFile) + } + } + + @VisibleForTesting + internal fun getMostRecentFile(storage: Storage): Blob? { + val blobs = storage.list(gcsBucketName)?.iterateAll() + return if (blobs == null) { + null + } else { + blobs + .filter { it.name.endsWith(".jsonl") } + .maxByOrNull { extractTimestamp(it.name) } + } + } + + @VisibleForTesting + internal fun extractTimestamp(fileName: String): Long { + logger.info { "OrganizationCustomerAttributesServiceDataImpl.extractTimestamp fileName=$fileName" } + return try { + val timestampPart = fileName.split("_").getOrNull(5) + timestampPart?.toLongOrNull() ?: 0L + } catch (e: Exception) { + logger.warn { "OrganizationCustomerAttributesServiceDataImpl Failed to extract timestamp from file name: $fileName" } + 0L + } + } + + @VisibleForTesting + internal fun readFileContent(blob: Blob): Map { + return try { + val content = blob.getContent() + val jsonLines = String(content).lines().filter { it.isNotBlank() } + jsonLines.mapNotNull { parseJsonLine(it) } + .associate { it.organizationId to it.customerTier } + } catch (e: Exception) { + logger.error(e) { "OrganizationCustomerAttributesServiceDataImpl Failed to read content of the file: ${blob.name}" } + emptyMap() + } + } + + @VisibleForTesting + internal fun parseJsonLine(line: String): OrganizationCustomerTierMapping? { + val jsonObject = jacksonObjectMapper().readTree(line) + val organizationIdString = jsonObject["_airbyte_data"]?.get("organization_id")?.asText() ?: return null + val customerTierString = jsonObject["_airbyte_data"]?.get("customer_tier")?.asText() ?: return null + + return if (organizationIdString == NO_ORGANIZATION_ID || customerTierString == NO_CUSTOMER_TIER) { + null + } else { + OrganizationCustomerTierMapping( + organizationId = UUID.fromString(organizationIdString), + customerTier = CustomerTier.valueOf(customerTierString.replace(" ", "_").uppercase()), + ) + } + } +} + +data class OrganizationCustomerTierMapping( + val organizationId: UUID, + val customerTier: CustomerTier?, +) diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImpl.kt index 10fdb60c866..82fe927f943 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImpl.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/PermissionServiceDataImpl.kt @@ -22,6 +22,12 @@ open class PermissionServiceDataImpl( private val workspaceService: WorkspaceService, private val permissionRepository: PermissionRepository, ) : PermissionService { + override fun getPermission(permissionId: UUID): Permission { + return permissionRepository.findById(permissionId) + .orElseThrow { ConfigNotFoundException(ConfigSchema.PERMISSION, "Permission not found: $permissionId") } + .toConfigModel() + } + override fun listPermissions(): List { return permissionRepository.find().map { it.toConfigModel() } } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImpl.kt index 2cae33dce7e..13e69a089a8 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImpl.kt +++ 
b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImpl.kt @@ -117,6 +117,42 @@ class ScopedConfigurationServiceDataImpl(private val repository: ScopedConfigura return scopeConfigMap.values.toList() } + override fun getScopedConfigurations( + configKey: ScopedConfigurationKey, + scopes: Map, + ): List { + for (scopeType in scopes.keys) { + if (!configKey.supportedScopes.contains(scopeType)) { + throw IllegalArgumentException("Scope type $scopeType is not supported by key ${configKey.key}") + } + } + + val scopeConfigMap = HashMap() + // We care about the order in which we loop over the keys, this order is reversed from its declaration. + for (supportedScope in configKey.supportedScopes.reversed()) { + if (!scopes.keys.contains(supportedScope)) { + continue + } + + val scopedConfigs = + repository + .findByKeyAndScopeTypeAndScopeId( + configKey.key, + supportedScope.toEntity(), + // Get the id for this scope + scopes[supportedScope]!!, + ) + .map { it.toConfigModel() } + .toList() + + // For each iteration, add or replace items to give a "sorted" values list + scopeConfigMap.putAll(scopedConfigs.associateBy({ it.resourceId }, { it })) + } + + // Return the values as they are now a list of scoped configs by precedence of supportedScopes. + return scopeConfigMap.values.toList() + } + override fun getScopedConfigurations( configKey: ScopedConfigurationKey, resourceType: ConfigResourceType, @@ -237,4 +273,24 @@ class ScopedConfigurationServiceDataImpl(private val repository: ScopedConfigura override fun deleteScopedConfigurations(configIds: List) { repository.deleteByIdInList(configIds) } + + override fun updateScopedConfigurationsOriginAndValuesForOriginInList( + key: String, + resourceType: ConfigResourceType, + resourceId: UUID, + originType: ConfigOriginType, + origins: List, + newOrigin: String, + newValue: String, + ) { + repository.updateByKeyAndResourceTypeAndResourceIdAndOriginTypeAndOriginIn( + key, + resourceType.toEntity(), + resourceId, + originType.toEntity(), + origins, + newOrigin, + newValue, + ) + } } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/JobMapper.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/JobMapper.kt index fc9d39657c1..2d0b840f9bd 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/JobMapper.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/JobMapper.kt @@ -60,7 +60,7 @@ fun EntityJobWithAssociations.toConfigModel(): ModelJob { fun EntityJob.toConfigModel(): ModelJob { return ModelJob( this.id!!, - Enums.convertTo(this.configType, JobConfig.ConfigType::class.java), + this.configType?.toConfig(), this.scope, Jsons.`object`(this.config, JobConfig::class.java), emptyList(), diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/OrganizationMapper.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/OrganizationMapper.kt index a76a2e5b8e9..31b2c4a97c3 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/OrganizationMapper.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/OrganizationMapper.kt @@ -9,8 +9,6 @@ fun EntityOrganization.toConfigModel(): ModelOrganization = .withName(this.name) .withUserId(this.userId) .withEmail(this.email) - .withPba(this.pba) - .withOrgLevelBilling(this.orgLevelBilling) fun ModelOrganization.toEntity(): EntityOrganization = 
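The scope-map overload of getScopedConfigurations above resolves precedence by iterating the key's supported scopes from lowest to highest priority and letting later matches overwrite earlier ones per resource id (for ConnectorVersionKey, actor beats workspace beats organization). A self-contained sketch of that override mechanic; the Scope enum, Config class, and resolve function are stand-ins, not types from this change:

```kotlin
// Stand-in types: scope types listed in priority order (first = highest priority),
// and configurations keyed by the resource they apply to.
enum class Scope { ACTOR, WORKSPACE, ORGANIZATION }

data class Config(val resourceId: String, val value: String)

fun resolve(supportedScopes: List<Scope>, configsByScope: Map<Scope, List<Config>>): List<Config> {
    val byResource = HashMap<String, Config>()
    // Walk from lowest to highest priority so higher-priority entries overwrite lower ones.
    for (scope in supportedScopes.reversed()) {
        val configs = configsByScope[scope] ?: continue
        byResource.putAll(configs.associateBy { it.resourceId })
    }
    return byResource.values.toList()
}

fun main() {
    val resolved = resolve(
        supportedScopes = listOf(Scope.ACTOR, Scope.WORKSPACE, Scope.ORGANIZATION),
        configsByScope = mapOf(
            Scope.ORGANIZATION to listOf(Config("definition-a", "1.0.0"), Config("definition-b", "2.0.0")),
            Scope.ACTOR to listOf(Config("definition-a", "1.1.0-rc")),
        ),
    )
    // definition-a resolves to the ACTOR-scoped value; definition-b falls back to ORGANIZATION.
    println(resolved)
}
```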
EntityOrganization( @@ -18,6 +16,4 @@ fun ModelOrganization.toEntity(): EntityOrganization = name = this.name, userId = this.userId, email = this.email, - pba = this.pba, - orgLevelBilling = this.orgLevelBilling, ) diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/OrganizationPaymentConfigMapper.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/OrganizationPaymentConfigMapper.kt index 781d5a08221..f91eb878fa1 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/OrganizationPaymentConfigMapper.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/OrganizationPaymentConfigMapper.kt @@ -2,9 +2,11 @@ package io.airbyte.data.services.impls.data.mappers import io.airbyte.config.OrganizationPaymentConfig as ModelOrganizationPaymentConfig import io.airbyte.config.OrganizationPaymentConfig.PaymentStatus as ModelPaymentStatus +import io.airbyte.config.OrganizationPaymentConfig.SubscriptionStatus as ModelSubscriptionStatus import io.airbyte.config.OrganizationPaymentConfig.UsageCategoryOverride as ModelUsageCategoryOverride import io.airbyte.data.repositories.entities.OrganizationPaymentConfig as EntityOrganizationPaymentConfig import io.airbyte.db.instance.configs.jooq.generated.enums.PaymentStatus as EntityPaymentStatus +import io.airbyte.db.instance.configs.jooq.generated.enums.SubscriptionStatus as EntitySubscriptionStatus import io.airbyte.db.instance.configs.jooq.generated.enums.UsageCategoryOverride as EntityUsageCategoryOverride fun EntityOrganizationPaymentConfig.toConfigModel(): ModelOrganizationPaymentConfig = @@ -12,6 +14,7 @@ fun EntityOrganizationPaymentConfig.toConfigModel(): ModelOrganizationPaymentCon .withOrganizationId(this.organizationId) .withPaymentProviderId(this.paymentProviderId) .withPaymentStatus(this.paymentStatus.toConfigModel()) + .withSubscriptionStatus(this.subscriptionStatus.toConfigModel()) .withGracePeriodEndAt(this.gracePeriodEndAt?.toEpochSecond()) .withUsageCategoryOverride(this.usageCategoryOverride?.toConfigModel()) .withCreatedAt(this.createdAt?.toEpochSecond()) @@ -22,6 +25,7 @@ fun ModelOrganizationPaymentConfig.toEntity(): EntityOrganizationPaymentConfig = organizationId = this.organizationId, paymentProviderId = this.paymentProviderId, paymentStatus = this.paymentStatus.toEntity(), + subscriptionStatus = this.subscriptionStatus.toEntity(), gracePeriodEndAt = this.gracePeriodEndAt?.let { java.time.OffsetDateTime.ofInstant( @@ -34,6 +38,13 @@ fun ModelOrganizationPaymentConfig.toEntity(): EntityOrganizationPaymentConfig = updatedAt = this.updatedAt?.let { java.time.OffsetDateTime.ofInstant(java.time.Instant.ofEpochSecond(it), java.time.ZoneOffset.UTC) }, ) +fun EntitySubscriptionStatus.toConfigModel(): ModelSubscriptionStatus = + when (this) { + EntitySubscriptionStatus.pre_subscription -> ModelSubscriptionStatus.PRE_SUBSCRIPTION + EntitySubscriptionStatus.subscribed -> ModelSubscriptionStatus.SUBSCRIBED + EntitySubscriptionStatus.unsubscribed -> ModelSubscriptionStatus.UNSUBSCRIBED + } + fun EntityPaymentStatus.toConfigModel(): ModelPaymentStatus = when (this) { EntityPaymentStatus.uninitialized -> ModelPaymentStatus.UNINITIALIZED @@ -44,6 +55,13 @@ fun EntityPaymentStatus.toConfigModel(): ModelPaymentStatus = EntityPaymentStatus.manual -> ModelPaymentStatus.MANUAL } +fun ModelSubscriptionStatus.toEntity(): EntitySubscriptionStatus = + when (this) { + ModelSubscriptionStatus.PRE_SUBSCRIPTION -> 
EntitySubscriptionStatus.pre_subscription + ModelSubscriptionStatus.SUBSCRIBED -> EntitySubscriptionStatus.subscribed + ModelSubscriptionStatus.UNSUBSCRIBED -> EntitySubscriptionStatus.unsubscribed + } + fun ModelPaymentStatus.toEntity(): EntityPaymentStatus = when (this) { ModelPaymentStatus.UNINITIALIZED -> EntityPaymentStatus.uninitialized diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/ScopedConfigurationMapper.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/ScopedConfigurationMapper.kt index fce9aef21a3..f1cd599eb6f 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/ScopedConfigurationMapper.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/mappers/ScopedConfigurationMapper.kt @@ -73,7 +73,7 @@ fun EntityScopedConfiguration.toConfigModel(): ModelScopedConfiguration { .withValue(this.value) .withScopeType(this.scopeType.toConfigModel()) .withScopeId(this.scopeId) - .withResourceType(this.resourceType.toConfigModel()) + .withResourceType(this.resourceType?.toConfigModel()) .withResourceId(this.resourceId) .withOriginType(this.originType.toConfigModel()) .withOrigin(this.origin) diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConnectionAutoDisabledReason.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConnectionAutoDisabledReason.kt index e4ae6acf796..40682a13a82 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConnectionAutoDisabledReason.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ConnectionAutoDisabledReason.kt @@ -1,11 +1,12 @@ package io.airbyte.data.services.shared enum class ConnectionAutoDisabledReason { - ONLY_FAILED_JOBS_RECENTLY, - TOO_MANY_CONSECUTIVE_FAILED_JOBS_IN_A_ROW, + TOO_MANY_FAILED_JOBS_WITH_NO_RECENT_SUCCESS, SCHEMA_CHANGES_ARE_BREAKING, DISABLE_CONNECTION_IF_ANY_SCHEMA_CHANGES, INVALID_CREDIT_BALANCE, CONNECTOR_NOT_SUPPORTED, WORKSPACE_IS_DELINQUENT, + INVOICE_MARKED_UNCOLLECTIBLE, + INVALID_PAYMENT_METHOD, } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ScopedConfigurationKey.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ScopedConfigurationKey.kt index e5b15f306af..a1deca5c496 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ScopedConfigurationKey.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/shared/ScopedConfigurationKey.kt @@ -19,7 +19,16 @@ data object ConnectorVersionKey : ScopedConfigurationKey( supportedScopes = listOf(ConfigScopeType.ACTOR, ConfigScopeType.WORKSPACE, ConfigScopeType.ORGANIZATION), ) +/** + * A token used to allow the workload launcher to add labels to a pod for network policy application. 
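+ * Resolution is scoped: a value pinned at WORKSPACE scope takes precedence over one pinned at
+ * ORGANIZATION scope, following the supportedScopes order declared below (a descriptive note; the
+ * exact lookup call sites are not shown in this change).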
+ */ +data object NetworkSecurityTokenKey : ScopedConfigurationKey( + key = "network_security_token", + supportedScopes = listOf(ConfigScopeType.WORKSPACE, ConfigScopeType.ORGANIZATION), +) + val ScopedConfigurationKeys: Map = mapOf( ConnectorVersionKey.key to ConnectorVersionKey, + NetworkSecurityTokenKey.key to NetworkSecurityTokenKey, ) diff --git a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/JooqTestDbSetupHelper.java b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/JooqTestDbSetupHelper.java index 84f41685e08..74cbb126678 100644 --- a/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/JooqTestDbSetupHelper.java +++ b/airbyte-data/src/test/java/io/airbyte/data/services/impls/jooq/JooqTestDbSetupHelper.java @@ -38,7 +38,6 @@ import java.util.List; import java.util.Map; import java.util.UUID; -import lombok.Getter; public class JooqTestDbSetupHelper extends BaseConfigDatabaseTest { @@ -52,21 +51,13 @@ public class JooqTestDbSetupHelper extends BaseConfigDatabaseTest { private final UUID SOURCE_DEFINITION_ID = UUID.randomUUID(); private final UUID DESTINATION_DEFINITION_ID = UUID.randomUUID(); private final String DOCKER_IMAGE_TAG = "0.0.1"; - @Getter private Organization organization; - @Getter private StandardWorkspace workspace; - @Getter private StandardSourceDefinition sourceDefinition; - @Getter private StandardDestinationDefinition destinationDefinition; - @Getter private ActorDefinitionVersion sourceDefinitionVersion; - @Getter private ActorDefinitionVersion destinationDefinitionVersion; - @Getter private SourceConnection source; - @Getter private DestinationConnection destination; public JooqTestDbSetupHelper() { @@ -237,9 +228,7 @@ private Organization createBaseOrganization() { return new Organization() .withOrganizationId(ORGANIZATION_ID) .withName("organization") - .withEmail("org@airbyte.io") - .withPba(false) - .withOrgLevelBilling(false); + .withEmail("org@airbyte.io"); } private StandardWorkspace createBaseWorkspace() { @@ -265,4 +254,36 @@ private static ActorDefinitionVersion createBaseActorDefVersion(final UUID actor .withConnectionSpecification(Jsons.jsonNode(Map.of("key", "value1"))).withProtocolVersion("1.0.0")); } + public SourceConnection getSource() { + return source; + } + + public DestinationConnection getDestination() { + return destination; + } + + public Organization getOrganization() { + return organization; + } + + public StandardWorkspace getWorkspace() { + return workspace; + } + + public StandardSourceDefinition getSourceDefinition() { + return sourceDefinition; + } + + public StandardDestinationDefinition getDestinationDefinition() { + return destinationDefinition; + } + + public ActorDefinitionVersion getSourceDefinitionVersion() { + return sourceDefinitionVersion; + } + + public ActorDefinitionVersion getDestinationDefinitionVersion() { + return destinationDefinitionVersion; + } + } diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdaterTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdaterTest.kt index 1f33c11db14..d13f58edc24 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdaterTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/helpers/ActorDefinitionVersionUpdaterTest.kt @@ -21,8 +21,10 @@ import io.airbyte.featureflag.ANONYMOUS import io.airbyte.featureflag.TestClient import io.airbyte.featureflag.UseBreakingChangeScopes import io.airbyte.featureflag.Workspace +import 
io.mockk.Runs import io.mockk.clearAllMocks import io.mockk.every +import io.mockk.just import io.mockk.mockk import io.mockk.slot import io.mockk.verify @@ -726,6 +728,45 @@ internal class ActorDefinitionVersionUpdaterTest { } } + @Test + fun testMigrateReleaseCandidatePins() { + val actorDefinitionId = UUID.randomUUID() + val origins = listOf("origin1", "origin2") + val newOrigin = "origin3" + val newReleaseCandidateVersionId = UUID.randomUUID() + + every { + scopedConfigurationService.updateScopedConfigurationsOriginAndValuesForOriginInList( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigOriginType.CONNECTOR_ROLLOUT, + origins, + newOrigin, + newReleaseCandidateVersionId.toString(), + ) + } just Runs + + actorDefinitionVersionUpdater.migrateReleaseCandidatePins( + actorDefinitionId, + origins, + newOrigin, + newReleaseCandidateVersionId, + ) + + verify { + scopedConfigurationService.updateScopedConfigurationsOriginAndValuesForOriginInList( + ConnectorVersionKey.key, + ConfigResourceType.ACTOR_DEFINITION, + actorDefinitionId, + ConfigOriginType.CONNECTOR_ROLLOUT, + origins, + newOrigin, + newReleaseCandidateVersionId.toString(), + ) + } + } + @ParameterizedTest @MethodSource("getBreakingChangesAfterVersionMethodSource") fun testGetBreakingChangesAfterVersion( diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/JobsRepositoryTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/JobsRepositoryTest.kt new file mode 100644 index 00000000000..7a98649fd81 --- /dev/null +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/JobsRepositoryTest.kt @@ -0,0 +1,149 @@ +package io.airbyte.data.repositories + +import io.airbyte.commons.json.Jsons +import io.airbyte.data.repositories.entities.Job +import io.airbyte.db.instance.jobs.jooq.generated.enums.JobConfigType +import io.airbyte.db.instance.jobs.jooq.generated.enums.JobStatus +import io.kotest.matchers.shouldBe +import io.micronaut.context.env.Environment +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test +import java.time.OffsetDateTime +import java.time.ZoneOffset + +@MicronautTest(environments = [Environment.TEST]) +internal class JobsRepositoryTest : AbstractConfigRepositoryTest() { + private val scope1 = "scope1" + private val scope2 = "scope2" + private val scope3 = "scope3" + private val config = Jsons.jsonNode(mapOf()) + private var nextCreatedAt = OffsetDateTime.of(2021, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC) + + @AfterEach + fun cleanup() { + jobsRepository.deleteAll() + } + + /** + * Create a job with the given scope and status. + * Increments the job ID and created at time on each call so that + * created jobs are ordered and unique. 
+ */ + private fun createJob( + id: Int, + scope: String, + status: JobStatus, + ) = Job( + id = id.toLong(), + scope = scope, + status = status, + configType = JobConfigType.sync, + config = config, + createdAt = nextCreatedAt, + ).also { nextCreatedAt = nextCreatedAt.plusDays(1) } + + @Nested + inner class CountFailedJobsSinceLastSuccessForScope { + @Test + fun `test count failed jobs since last success for scope`() { + val jobs = + listOf( + createJob(1, scope1, JobStatus.failed), + createJob(2, scope1, JobStatus.succeeded), + createJob(3, scope1, JobStatus.failed), + // wrong scope + createJob(4, scope2, JobStatus.failed), + createJob(5, scope1, JobStatus.cancelled), + createJob(6, scope1, JobStatus.failed), + ) + + jobsRepository.saveAll(jobs) + + val result = jobsRepository.countFailedJobsSinceLastSuccessForScope(scope1) + + result.shouldBe(2) + } + + @Test + fun `test count all failed jobs if no success`() { + val jobs = + listOf( + createJob(1, scope1, JobStatus.failed), + createJob(2, scope1, JobStatus.failed), + createJob(3, scope1, JobStatus.failed), + // wrong scope + createJob(4, scope2, JobStatus.failed), + createJob(5, scope1, JobStatus.cancelled), + createJob(6, scope1, JobStatus.failed), + createJob(7, scope1, JobStatus.running), + ) + + jobsRepository.saveAll(jobs) + + val result = jobsRepository.countFailedJobsSinceLastSuccessForScope(scope1) + + result.shouldBe(4) + } + } + + @Nested + inner class LastSuccessfulJobForScope { + @Test + fun `returns most recent succeeded job for scope`() { + val jobs = + listOf( + createJob(1, scope1, JobStatus.failed), + createJob(2, scope1, JobStatus.succeeded), + createJob(3, scope1, JobStatus.failed), + // most recent success for scope1 + createJob(4, scope1, JobStatus.succeeded), + // most recent success for scope2 + createJob(5, scope2, JobStatus.succeeded), + createJob(6, scope1, JobStatus.cancelled), + createJob(7, scope1, JobStatus.failed), + // scope3 never succeeded + createJob(8, scope3, JobStatus.running), + ) + + jobsRepository.saveAll(jobs) + + val resultScope1 = jobsRepository.lastSuccessfulJobForScope(scope1) + val resultScope2 = jobsRepository.lastSuccessfulJobForScope(scope2) + val resultScope3 = jobsRepository.lastSuccessfulJobForScope(scope3) + + resultScope1?.id.shouldBe(4) + resultScope2?.id.shouldBe(5) + resultScope3.shouldBe(null) + } + } + + @Nested + inner class GetPriorJobWithStatusForScopeAndJobId { + @Test + fun `returns most recent job with given status before given job ID`() { + val jobs = + listOf( + createJob(1, scope1, JobStatus.failed), + createJob(2, scope1, JobStatus.succeeded), + createJob(3, scope1, JobStatus.failed), + createJob(4, scope2, JobStatus.succeeded), + createJob(5, scope2, JobStatus.failed), + createJob(6, scope2, JobStatus.running), + createJob(7, scope3, JobStatus.failed), + createJob(8, scope3, JobStatus.failed), + ) + + jobsRepository.saveAll(jobs) + + val result1 = jobsRepository.getPriorJobWithStatusForScopeAndJobId(scope1, 3, JobStatus.failed) + val result2 = jobsRepository.getPriorJobWithStatusForScopeAndJobId(scope2, 6, JobStatus.succeeded) + val result3 = jobsRepository.getPriorJobWithStatusForScopeAndJobId(scope3, 8, JobStatus.succeeded) + + result1?.id.shouldBe(1) + result2?.id.shouldBe(4) + result3.shouldBe(null) + } + } +} diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/OrganizationRepositoryTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/OrganizationRepositoryTest.kt index 2c1db81a222..cb749d41c3a 100644 --- 
a/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/OrganizationRepositoryTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/repositories/OrganizationRepositoryTest.kt @@ -19,8 +19,6 @@ class OrganizationRepositoryTest : AbstractConfigRepositoryTest() { name = "Test Organization", email = "test@example.com", userId = UUID.randomUUID(), - pba = true, - orgLevelBilling = true, ) organizationRepository.save(organization) @@ -41,8 +39,6 @@ class OrganizationRepositoryTest : AbstractConfigRepositoryTest() { name = "Test Organization", email = "test@example.com", userId = UUID.randomUUID(), - pba = true, - orgLevelBilling = true, ) organizationRepository.save(organization) @@ -62,8 +58,6 @@ class OrganizationRepositoryTest : AbstractConfigRepositoryTest() { name = "Test Organization", email = "test@example.com", userId = UUID.randomUUID(), - pba = true, - orgLevelBilling = true, ) organizationRepository.save(organization) @@ -105,8 +99,6 @@ class OrganizationRepositoryTest : AbstractConfigRepositoryTest() { name = "Test Organization", email = "test@example.com", userId = UUID.randomUUID(), - pba = true, - orgLevelBilling = true, ) val savedOrg = organizationRepository.save(organization) diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/OrganizationCustomerAttributesServiceDataImplTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/OrganizationCustomerAttributesServiceDataImplTest.kt new file mode 100644 index 00000000000..7bea93d403a --- /dev/null +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/OrganizationCustomerAttributesServiceDataImplTest.kt @@ -0,0 +1,190 @@ +package io.airbyte.data.services.impls.data + +import com.fasterxml.jackson.core.JsonParseException +import com.google.api.gax.paging.Page +import com.google.cloud.storage.Blob +import com.google.cloud.storage.Storage +import io.airbyte.data.config.OrganizationCustomerAttributesServiceConfig +import io.airbyte.data.services.CustomerTier +import io.github.oshai.kotlinlogging.KotlinLogging +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertThrows +import java.util.UUID + +private val logger = KotlinLogging.logger {} + +class OrganizationCustomerAttributesServiceDataImplTest { + private lateinit var storageMock: Storage + private lateinit var blobMock: Blob + private lateinit var organizationCustomerAttributeService: OrganizationCustomerAttributesServiceDataImpl + private lateinit var organizationCustomerAttributeServiceConfig: OrganizationCustomerAttributesServiceConfig + + @BeforeEach + fun setUp() { + storageMock = mockk() + organizationCustomerAttributeServiceConfig = mockk() + blobMock = mockk() + organizationCustomerAttributeService = + OrganizationCustomerAttributesServiceDataImpl( + gcsBucketName = "test-bucket", + gcsApplicationCredentials = "creds", + gcsProjectId = "projectId", + organizationCustomerAttributeServiceConfig = organizationCustomerAttributeServiceConfig, + ) + } + + @Test + fun `test getOrganizationTiers returns empty map when storage is null`() { + every { organizationCustomerAttributeServiceConfig.provideStorage(any(), any()) } returns null + + val result = organizationCustomerAttributeService.getOrganizationTiers() + + assertTrue(result.isEmpty()) + logger.info { "Storage is null: Result 
verified as empty map." } + } + + @Test + fun `test getOrganizationTiers returns empty map when no files exist`() { + every { organizationCustomerAttributeServiceConfig.provideStorage(any(), any()) } returns storageMock + every { storageMock.list("test-bucket") } returns null + + val result = organizationCustomerAttributeService.getOrganizationTiers() + + assertTrue(result.isEmpty()) + verify { storageMock.list("test-bucket") } + } + + @Test + fun `test getOrganizationTiers reads and parses the most recent file`() { + val blob1 = mockk() + val blob2 = mockk() + + every { blob1.name } returns "data/sales_customer_attributes/2024_11_24_1732490206043_0.jsonl" + every { blob2.name } returns "data/sales_customer_attributes/2024_11_24_1732490206044_0.jsonl" + every { organizationCustomerAttributeServiceConfig.provideStorage(any(), any()) } returns storageMock + + every { storageMock.list("test-bucket") } returns + mockk>().apply { + every { iterateAll() } returns listOf(blob1, blob2) + } + + every { blob2.getContent() } returns + """ + {"_airbyte_data":{"organization_id":"00000000-0000-0000-0000-000000000001","customer_tier":"Tier 1"}} + {"_airbyte_data":{"organization_id":"00000000-0000-0000-0000-000000000002","customer_tier":"No Customer Tier"}} + {"_airbyte_data":{"organization_id":"No Organization Id","customer_tier":"No Customer Tier"}} + {"_airbyte_data":{"organization_id":"No Organization Id","customer_tier":"Tier 1"}} + """.trimIndent().toByteArray() + + val result = organizationCustomerAttributeService.getOrganizationTiers() + + assertEquals(1, result.size) // Only valid entries should be included + assertEquals(CustomerTier.TIER_1, result[UUID.fromString("00000000-0000-0000-0000-000000000001")]) + + verify { storageMock.list("test-bucket") } + } + + @Test + fun `test getMostRecentFile returns the most recent file`() { + val blob1 = mockk() + val blob2 = mockk() + val blob3 = mockk() + + every { blob1.name } returns "data/sales_customer_attributes/2024_11_24_1732490206041_0.jsonl" + every { blob2.name } returns "data/sales_customer_attributes/2024_11_24_1732490206047_0.jsonl" + every { blob3.name } returns "data/sales_customer_attributes/2024_11_24_1732490206044_0.jsonl" + + every { storageMock.list("test-bucket") } returns + mockk>().apply { + every { iterateAll() } returns listOf(blob1, blob2, blob3) + } + + val mostRecentFile = organizationCustomerAttributeService.getMostRecentFile(storageMock) + + assertEquals(blob2, mostRecentFile) + verify { + storageMock.list("test-bucket") + } + } + + @Test + fun `test getMostRecentFile returns the most recent file when no files exist`() { + every { organizationCustomerAttributeServiceConfig.provideStorage(any(), any()) } returns storageMock + every { storageMock.list("test-bucket") } returns null + + val mostRecentFile = organizationCustomerAttributeService.getMostRecentFile(storageMock) + + assertEquals(null, mostRecentFile) + } + + @Test + fun `test extractTimestamp extracts valid timestamp`() { + val fileName = "data/sales_customer_attributes/2024_11_24_1732490206044_0.jsonl" + + val result = organizationCustomerAttributeService.extractTimestamp(fileName) + + assertEquals(1732490206044, result) + } + + @Test + fun `test extractTimestamp handles invalid timestamp format`() { + val fileName = "invalid_file_name.jsonl" + + val result = organizationCustomerAttributeService.extractTimestamp(fileName) + + assertEquals(0L, result) + } + + @Test + fun `test readFileContent parses valid file content`() { + val blobMock = mockk() + val fileContent = + 
""" + {"_airbyte_data":{"organization_id":"00000000-0000-0000-0000-000000000001","customer_tier":"Tier 1"}} + {"_airbyte_data":{"organization_id":"00000000-0000-0000-0000-000000000002","customer_tier":"Tier 2"}} + """.trimIndent() + + every { blobMock.getContent() } returns fileContent.toByteArray() + + val result = organizationCustomerAttributeService.readFileContent(blobMock) + + assertEquals(2, result.size) + assertEquals(CustomerTier.TIER_1, result[UUID.fromString("00000000-0000-0000-0000-000000000001")]) + assertEquals(CustomerTier.TIER_2, result[UUID.fromString("00000000-0000-0000-0000-000000000002")]) + } + + @Test + fun `test readFileContent throws exception on invalid file content`() { + val blobMock = mockk() + val invalidContent = "invalid-json-content" + + every { blobMock.getContent() } returns invalidContent.toByteArray() + + val result = organizationCustomerAttributeService.readFileContent(blobMock) + + assertEquals(emptyMap(), result) + } + + @Test + fun `test parseJsonLine parses valid JSON line`() { + val jsonLine = """{"_airbyte_data":{"organization_id":"00000000-0000-0000-0000-000000000001","customer_tier":"Tier 1"}}""" + + val result = organizationCustomerAttributeService.parseJsonLine(jsonLine) + + assertEquals(UUID.fromString("00000000-0000-0000-0000-000000000001"), result?.organizationId) + assertEquals(CustomerTier.TIER_1, result?.customerTier) + } + + @Test + fun `test parseJsonLine throws for invalid JSON`() { + val jsonLine = """invalid-json""" + + assertThrows { organizationCustomerAttributeService.parseJsonLine(jsonLine) } + } +} diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/OrganizationServiceDataImplTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/OrganizationServiceDataImplTest.kt index d4758626276..62af21df2ae 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/OrganizationServiceDataImplTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/OrganizationServiceDataImplTest.kt @@ -21,8 +21,6 @@ private val BASE_ORGANIZATION = userId = UUID.randomUUID() name = "Test Organization" email = "test@airbyte.io" - pba = false - orgLevelBilling = false } private val ORGANIZATION_WITH_ID = @@ -31,8 +29,6 @@ private val ORGANIZATION_WITH_ID = userId = BASE_ORGANIZATION.userId name = BASE_ORGANIZATION.name email = BASE_ORGANIZATION.email - pba = BASE_ORGANIZATION.pba - orgLevelBilling = BASE_ORGANIZATION.orgLevelBilling } class OrganizationServiceDataImplTest { diff --git a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImplTest.kt b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImplTest.kt index dc1351cbeae..0f7669b2222 100644 --- a/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImplTest.kt +++ b/airbyte-data/src/test/kotlin/io/airbyte/data/services/impls/data/ScopedConfigurationServiceDataImplTest.kt @@ -10,8 +10,10 @@ import io.airbyte.data.services.impls.data.mappers.toConfigModel import io.airbyte.data.services.shared.ConfigScopeMapWithId import io.airbyte.data.services.shared.ScopedConfigurationKey import io.airbyte.db.instance.configs.jooq.generated.enums.ConfigOriginType +import io.mockk.Runs import io.mockk.clearAllMocks import io.mockk.every +import io.mockk.just import io.mockk.justRun import io.mockk.mockk import io.mockk.verify @@ -182,6 +184,56 @@ internal class ScopedConfigurationServiceDataImplTest { } } 
+ @Test + fun `test get configuration by only scope and key object`() { + val configKey = + ScopedConfigurationKey( + key = "test-key", + supportedScopes = listOf(ModelConfigScopeType.WORKSPACE), + ) + + val configId = UUID.randomUUID() + val scopeId = UUID.randomUUID() + + val config = + ScopedConfiguration( + id = configId, + key = configKey.key, + value = "value", + scopeType = EntityConfigScopeType.workspace, + scopeId = scopeId, + resourceType = null, + resourceId = null, + originType = ConfigOriginType.user, + origin = "my_user_id", + description = "my_description", + ) + + every { + scopedConfigurationRepository.findByKeyAndScopeTypeAndScopeId( + configKey.key, + EntityConfigScopeType.workspace, + scopeId, + ) + } returns listOf(config) + + val retrievedConfig = + scopedConfigurationService.getScopedConfigurations( + configKey, + mapOf(ModelConfigScopeType.WORKSPACE to scopeId), + ) + + assert(retrievedConfig == listOf(config.toConfigModel())) + + verify { + scopedConfigurationRepository.findByKeyAndScopeTypeAndScopeId( + configKey.key, + EntityConfigScopeType.workspace, + scopeId, + ) + } + } + @Test fun `test get configurations by scope map and key object`() { val configKey = @@ -953,4 +1005,77 @@ internal class ScopedConfigurationServiceDataImplTest { verifyAll { scopedConfigurationRepository.deleteByIdInList(configIds) } } + + @Test + fun `test update scoped configurations values for origin in list`() { + val resourceId = UUID.randomUUID() + val newValue = "updated_value" + val origin1 = UUID.randomUUID().toString() + val origin2 = UUID.randomUUID().toString() + val newOrigin = UUID.randomUUID().toString() + + val config1 = + ScopedConfiguration( + id = UUID.randomUUID(), + key = "key", + value = "old_value", + scopeType = EntityConfigScopeType.actor, + scopeId = UUID.randomUUID(), + resourceType = EntityConfigResourceType.actor_definition, + resourceId = resourceId, + originType = ConfigOriginType.connector_rollout, + origin = origin1, + description = "description1", + ) + + val config2 = + ScopedConfiguration( + id = UUID.randomUUID(), + key = "key", + value = "old_value2", + scopeType = EntityConfigScopeType.actor, + scopeId = UUID.randomUUID(), + resourceType = EntityConfigResourceType.actor_definition, + resourceId = resourceId, + originType = ConfigOriginType.connector_rollout, + origin = origin2, + description = "description2", + ) + + val origins = listOf(origin1, origin2) + + every { + scopedConfigurationRepository.updateByKeyAndResourceTypeAndResourceIdAndOriginTypeAndOriginIn( + "key", + EntityConfigResourceType.actor_definition, + resourceId, + ConfigOriginType.connector_rollout, + origins, + newOrigin, + newValue, + ) + } just Runs + + scopedConfigurationService.updateScopedConfigurationsOriginAndValuesForOriginInList( + key = "key", + resourceType = ModelConfigResourceType.ACTOR_DEFINITION, + resourceId = resourceId, + originType = ModelConfigOriginType.CONNECTOR_ROLLOUT, + origins = origins, + newOrigin = newOrigin, + newValue = newValue, + ) + + verify { + scopedConfigurationRepository.updateByKeyAndResourceTypeAndResourceIdAndOriginTypeAndOriginIn( + "key", + EntityConfigResourceType.actor_definition, + resourceId, + ConfigOriginType.connector_rollout, + origins, + newOrigin, + newValue, + ) + } + } } diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_006__MakeResourceColumnsNullableScopedConfiguration.java 
b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_006__MakeResourceColumnsNullableScopedConfiguration.java new file mode 100644 index 00000000000..3f8def49ac7 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_006__MakeResourceColumnsNullableScopedConfiguration.java @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V1_1_0_006__MakeResourceColumnsNullableScopedConfiguration extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V1_1_0_006__MakeResourceColumnsNullableScopedConfiguration.class); + private static final String SCOPED_CONFIGURATION = "scoped_configuration"; + private static final String RESOURCE_TYPE = "resource_type"; + private static final String RESOURCE_ID = "resource_id"; + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + final DSLContext ctx = DSL.using(context.getConnection()); + runMigration(ctx); + } + + public static void runMigration(final DSLContext ctx) { + ctx.alterTable(SCOPED_CONFIGURATION) + .alter(DSL.field(RESOURCE_ID)).dropNotNull() + .execute(); + + ctx.alterTable(SCOPED_CONFIGURATION) + .alter(DSL.field(RESOURCE_TYPE)).dropNotNull() + .execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_007__AddSubscriptionStatusToOrganizationPaymentConfig.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_007__AddSubscriptionStatusToOrganizationPaymentConfig.java new file mode 100644 index 00000000000..fc21b3c4bb5 --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_007__AddSubscriptionStatusToOrganizationPaymentConfig.java @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import java.util.Arrays; +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jetbrains.annotations.NotNull; +import org.jooq.Catalog; +import org.jooq.DSLContext; +import org.jooq.EnumType; +import org.jooq.Field; +import org.jooq.Schema; +import org.jooq.impl.DSL; +import org.jooq.impl.SQLDataType; +import org.jooq.impl.SchemaImpl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V1_1_0_007__AddSubscriptionStatusToOrganizationPaymentConfig extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V1_1_0_007__AddSubscriptionStatusToOrganizationPaymentConfig.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. 
+ final DSLContext ctx = DSL.using(context.getConnection()); + + final Field subscriptionStatusField = + DSL.field("subscription_status", SQLDataType.VARCHAR.asEnumDataType(SubscriptionStatus.class) + .nullable(false) + .defaultValue(SubscriptionStatus.PRE_SUBSCRIPTION)); + + ctx.createType(SubscriptionStatus.NAME) + .asEnum(Arrays.stream(SubscriptionStatus.values()).map(SubscriptionStatus::getLiteral).toArray(String[]::new)) + .execute(); + + ctx.alterTable("organization_payment_config") + .addColumnIfNotExists(subscriptionStatusField) + .execute(); + } + + public enum SubscriptionStatus implements EnumType { + + PRE_SUBSCRIPTION("pre_subscription"), + UNSUBSCRIBED("unsubscribed"), + SUBSCRIBED("subscribed"); + + private final String literal; + public static final String NAME = "subscription_status"; + + SubscriptionStatus(final String literal) { + this.literal = literal; + } + + @Override + public Catalog getCatalog() { + return getSchema().getCatalog(); + } + + @Override + public Schema getSchema() { + return new SchemaImpl(DSL.name("public")); + } + + @Override + public String getName() { + return NAME; + } + + @Override + public @NotNull String getLiteral() { + return literal; + } + + } + +} diff --git a/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_008__RemoveLegacyBillingColumnsFromOrganization.java b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_008__RemoveLegacyBillingColumnsFromOrganization.java new file mode 100644 index 00000000000..29ebecd72ed --- /dev/null +++ b/airbyte-db/db-lib/src/main/java/io/airbyte/db/instance/configs/migrations/V1_1_0_008__RemoveLegacyBillingColumnsFromOrganization.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.db.instance.configs.migrations; + +import org.flywaydb.core.api.migration.BaseJavaMigration; +import org.flywaydb.core.api.migration.Context; +import org.jooq.DSLContext; +import org.jooq.impl.DSL; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class V1_1_0_008__RemoveLegacyBillingColumnsFromOrganization extends BaseJavaMigration { + + private static final Logger LOGGER = LoggerFactory.getLogger(V1_1_0_008__RemoveLegacyBillingColumnsFromOrganization.class); + + @Override + public void migrate(final Context context) throws Exception { + LOGGER.info("Running migration: {}", this.getClass().getSimpleName()); + + // Warning: please do not use any jOOQ generated code to write a migration. + // As database schema changes, the generated jOOQ code can be deprecated. So + // old migration may not compile if there is any generated code. 
+ final DSLContext ctx = DSL.using(context.getConnection()); + + ctx.alterTable("organization") + .dropColumnIfExists("pba") + .execute(); + + ctx.alterTable("organization") + .dropColumnIfExists("org_level_billing") + .execute(); + } + +} diff --git a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt index d8630cca86c..657e6d046e6 100644 --- a/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt +++ b/airbyte-db/db-lib/src/main/resources/configs_database/schema_dump.txt @@ -279,8 +279,6 @@ create table "public"."organization" ( "email" varchar(256) not null, "created_at" timestamp(6) with time zone not null default current_timestamp, "updated_at" timestamp(6) with time zone not null default current_timestamp, - "pba" boolean not null default false, - "org_level_billing" boolean not null default false, "tombstone" boolean not null default false, constraint "organization_pkey" primary key ("id") ); @@ -300,6 +298,7 @@ create table "public"."organization_payment_config" ( "usage_category_override" "public"."usage_category_override", "created_at" timestamp(6) with time zone not null default current_timestamp, "updated_at" timestamp(6) with time zone not null default current_timestamp, + "subscription_status" "public"."subscription_status" not null default cast('pre_subscription' as subscription_status), constraint "organization_payment_config_pkey" primary key ("organization_id"), constraint "organization_payment_config_payment_provider_id_key" unique ("payment_provider_id") ); @@ -327,8 +326,8 @@ create table "public"."schema_management" ( create table "public"."scoped_configuration" ( "id" uuid not null, "key" varchar(256) not null, - "resource_type" "public"."config_resource_type" not null, - "resource_id" uuid not null, + "resource_type" "public"."config_resource_type", + "resource_id" uuid, "scope_type" "public"."config_scope_type" not null, "scope_id" uuid not null, "value" varchar(256) not null, diff --git a/airbyte-featureflag/src/main/kotlin/Context.kt b/airbyte-featureflag/src/main/kotlin/Context.kt index eaa79c4d8fb..d0d4b70267c 100644 --- a/airbyte-featureflag/src/main/kotlin/Context.kt +++ b/airbyte-featureflag/src/main/kotlin/Context.kt @@ -284,3 +284,28 @@ data object Empty : Context { override val kind: String = "empty" override val key: String = "" } + +data class CloudProvider(override val key: String) : Context { + override val kind: String = "cloud-provider" + + companion object { + const val AWS = "aws" + } +} + +data class GeographicRegion(override val key: String) : Context { + override val kind: String = "geographic-region" + + companion object { + const val US = "us" + const val EU = "eu" + } +} + +data class CloudProviderRegion(override val key: String) : Context { + override val kind: String = "cloud-provider-region" + + companion object { + const val AWS_US_EAST_1 = "us-east-1" + } +} diff --git a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt index 7e1e7d8c24b..cbd195bb545 100644 --- a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt +++ b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt @@ -83,10 +83,6 @@ object SourceResourceOverrides : Temporary(key = "source-resource-overri object ConnectorApmEnabled : Permanent(key = "connectors.apm-enabled", default = false) -object AutoRechargeEnabled : Permanent(key = "billing.autoRecharge", default = false) - -object 
BillingInArrearsForNewSignups : Temporary(key = "billing.inArrearsForNewSignups", default = false) - object BillingMigrationMaintenance : Temporary(key = "billing.migrationMaintenance", default = false) // NOTE: this is deprecated in favor of FieldSelectionEnabled and will be removed once that flag is fully deployed. @@ -162,7 +158,7 @@ object EnableResumableFullRefresh : Temporary(key = "platform.enable-re object AlwaysRunCheckBeforeSync : Permanent(key = "platform.always-run-check-before-sync", default = false) -object DiscoverPostprocessInTemporal : Permanent(key = "platform.discover-postprocess-in-temporal", default = false) +object DiscoverPostprocessInTemporal : Permanent(key = "platform.discover-postprocess-in-temporal", default = true) object RestrictLoginsForSSODomains : Temporary(key = "platform.restrict-logins-for-sso-domains", default = false) @@ -176,12 +172,16 @@ object DisableAuthHeaderReplacement : Temporary(key = "platform.disable object NodeSelectorOverride : Temporary(key = "platform.node-selector-override", default = "") -object UseAsyncReplicate : Temporary(key = "platform.use-async-replicate", default = false) +object UseAsyncReplicate : Temporary(key = "platform.use-async-replicate", default = true) -object UseAsyncActivities : Temporary(key = "platform.use-async-activities", default = false) +object UseAsyncActivities : Temporary(key = "platform.use-async-activities", default = true) object ReportConnectorDiskUsage : Temporary(key = "platform.report-connector-disk-usage", default = false) object PlatformInitContainerImage : Temporary(key = "platform.init-container-image", default = "") -object StructuredLogs : Temporary(key = "platform.structured-logs", default = false) +object SubOneHourSyncSchedules : Permanent(key = "platform.allow-sub-one-hour-sync-frequency", default = false) + +object AllowMappersDefaultSecretPersistence : Permanent(key = "platform.allow-mappers-default-secret-persistence", default = false) + +object RunDeclarativeSourcesUpdater : Permanent(key = "platform.run-declarative-sources-updater", default = true) diff --git a/airbyte-keycloak-setup/build.gradle.kts b/airbyte-keycloak-setup/build.gradle.kts index adbaaeb5b7d..c2b51c53bc4 100644 --- a/airbyte-keycloak-setup/build.gradle.kts +++ b/airbyte-keycloak-setup/build.gradle.kts @@ -5,8 +5,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/Application.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/Application.java index c68f1c6f35e..3a5ddf87ffc 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/Application.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/Application.java @@ -6,15 +6,18 @@ import io.micronaut.context.ApplicationContext; import io.micronaut.runtime.Micronaut; -import lombok.extern.slf4j.Slf4j; +import java.lang.invoke.MethodHandles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Main application entry point responsible for setting up the Keycloak server with an Airbyte * client. 
*/ -@Slf4j public class Application { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static void main(final String[] args) { try { final ApplicationContext applicationContext = Micronaut.run(Application.class, args); diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersConfigurator.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersConfigurator.java index d7fd80ae6ce..e75b980886e 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersConfigurator.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/IdentityProvidersConfigurator.java @@ -7,22 +7,25 @@ import io.airbyte.commons.auth.config.OidcConfig; import jakarta.inject.Singleton; import jakarta.ws.rs.core.Response; +import java.lang.invoke.MethodHandles; import java.util.List; import java.util.Map; import java.util.Optional; -import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.representations.idm.IdentityProviderRepresentation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is responsible for configuring an identity provider. It creates and manages various * identity providers for authentication purposes. */ @Singleton -@Slf4j @SuppressWarnings("PMD.LiteralsFirstInComparisons") public class IdentityProvidersConfigurator { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + static final String AIRBYTE_MANAGED_IDP_KEY = "airbyte-managed-idp"; static final String AIRBYTE_MANAGED_IDP_VALUE = "true"; private static final String KEYCLOAK_PROVIDER_ID = "oidc"; // OIDC is the only supported provider ID for now diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakServer.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakServer.java index d2be04fd4a8..6231a0f628c 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakServer.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakServer.java @@ -8,20 +8,23 @@ import io.airbyte.commons.auth.keycloak.ClientScopeConfigurator; import jakarta.inject.Named; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.util.Map; -import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.Keycloak; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.representations.idm.RealmRepresentation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class represents the Keycloak server. 
It contains methods to register an initial user, web * client and identity provider */ @Singleton -@Slf4j public class KeycloakServer { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String FRONTEND_URL_ATTRIBUTE = "frontendUrl"; private final Keycloak keycloakAdminClient; diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakSetup.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakSetup.java index c7cbc0cb57c..35afc543a55 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakSetup.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/KeycloakSetup.java @@ -9,18 +9,21 @@ import io.micronaut.http.HttpResponse; import io.micronaut.http.client.HttpClient; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.sql.SQLException; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is responsible for setting up the Keycloak server. It initializes and configures the * server according to the provided specifications. */ @Singleton -@Slf4j @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") public class KeycloakSetup { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final HttpClient httpClient; private final KeycloakServer keycloakServer; private final AirbyteKeycloakConfiguration keycloakConfiguration; diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserConfigurator.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserConfigurator.java index 7ab191735e8..67324b4645a 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserConfigurator.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/UserConfigurator.java @@ -7,20 +7,23 @@ import io.airbyte.commons.auth.config.InitialUserConfig; import jakarta.inject.Singleton; import jakarta.ws.rs.core.Response; +import java.lang.invoke.MethodHandles; import java.util.Arrays; import java.util.Optional; -import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.representations.idm.CredentialRepresentation; import org.keycloak.representations.idm.UserRepresentation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class is responsible for user creation. It includes methods to create user credentials. 
*/ @Singleton -@Slf4j public class UserConfigurator { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static final int HTTP_STATUS_CREATED = 201; private final InitialUserConfig initialUserConfig; diff --git a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientConfigurator.java b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientConfigurator.java index 6b802c5dae5..d2c148a5115 100644 --- a/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientConfigurator.java +++ b/airbyte-keycloak-setup/src/main/java/io/airbyte/keycloak/setup/WebClientConfigurator.java @@ -8,22 +8,25 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import jakarta.ws.rs.core.Response; +import java.lang.invoke.MethodHandles; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; -import lombok.extern.slf4j.Slf4j; import org.keycloak.admin.client.resource.RealmResource; import org.keycloak.representations.idm.ClientRepresentation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * This class provides a web client. It can create and configure the client based on specified * parameters. */ @Singleton -@Slf4j public class WebClientConfigurator { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static final int HTTP_STATUS_CREATED = 201; private static final String LOCAL_OSS_DEV_URI = "https://localhost:3000/*"; private static final String LOCAL_CLOUD_DEV_URI = "https://localhost:3001/*"; diff --git a/airbyte-mappers/src/main/kotlin/io/airbyte/mappers/helpers/MapperHelper.kt b/airbyte-mappers/src/main/kotlin/io/airbyte/mappers/helpers/MapperHelper.kt index 15d003026f3..8f56c76faf8 100644 --- a/airbyte-mappers/src/main/kotlin/io/airbyte/mappers/helpers/MapperHelper.kt +++ b/airbyte-mappers/src/main/kotlin/io/airbyte/mappers/helpers/MapperHelper.kt @@ -5,6 +5,7 @@ import io.airbyte.config.mapper.configs.HashingConfig import io.airbyte.config.mapper.configs.HashingMapperConfig import io.airbyte.config.mapper.configs.HashingMethods import io.airbyte.mappers.transformations.HashingMapper +import java.util.UUID internal const val DEFAULT_HASHING_METHOD = HashingMapper.SHA256 internal const val DEFAULT_HASHING_SUFFIX = "_hashed" @@ -12,8 +13,12 @@ internal const val DEFAULT_HASHING_SUFFIX = "_hashed" /** * Create a hashing mapper for a given field. */ -fun createHashingMapper(fieldName: String): HashingMapperConfig { +fun createHashingMapper( + fieldName: String, + id: UUID? = null, +): HashingMapperConfig { return HashingMapperConfig( + id = id, name = MapperOperationName.HASHING, config = HashingConfig( @@ -24,6 +29,10 @@ fun createHashingMapper(fieldName: String): HashingMapperConfig { ) } +fun createHashingMapper(fieldName: String): HashingMapperConfig { + return createHashingMapper(fieldName, null) +} + /** * Get the name of the field that is being hashed from a hashing mapper. 
*/ diff --git a/airbyte-mappers/src/test/kotlin/io/airbyte/mappers/application/RecordMapperTest.kt b/airbyte-mappers/src/test/kotlin/io/airbyte/mappers/application/RecordMapperTest.kt index 1880ca917d6..a7c68a8144f 100644 --- a/airbyte-mappers/src/test/kotlin/io/airbyte/mappers/application/RecordMapperTest.kt +++ b/airbyte-mappers/src/test/kotlin/io/airbyte/mappers/application/RecordMapperTest.kt @@ -35,8 +35,8 @@ class RecordMapperTest { recordMapper.applyMappers( testRecord, listOf( - TestMapperConfig(TEST_MAPPER_NAME, null, TestConfig("field1", TestEnums.ONE, "field2")), - TestMapperConfig(TEST_MAPPER_NAME, null, TestConfig("field1_test", TestEnums.ONE, "field2")), + TestMapperConfig(TEST_MAPPER_NAME, null, null, TestConfig("field1", TestEnums.ONE, "field2")), + TestMapperConfig(TEST_MAPPER_NAME, null, null, TestConfig("field1_test", TestEnums.ONE, "field2")), ), ) diff --git a/airbyte-mappers/src/test/kotlin/io/airbyte/mappers/transformations/DestinationCatalogGeneratorTest.kt b/airbyte-mappers/src/test/kotlin/io/airbyte/mappers/transformations/DestinationCatalogGeneratorTest.kt index 954ea55c759..ff7dd8d11cd 100644 --- a/airbyte-mappers/src/test/kotlin/io/airbyte/mappers/transformations/DestinationCatalogGeneratorTest.kt +++ b/airbyte-mappers/src/test/kotlin/io/airbyte/mappers/transformations/DestinationCatalogGeneratorTest.kt @@ -319,7 +319,7 @@ class DestinationCatalogGeneratorTest { @Test fun `test generateDestinationCatalogMissingMapper`() { - val mapperConfig = TestMapperConfig(TEST_MAPPER_NAME, null, TestConfig("", TestEnums.ONE, "")) + val mapperConfig = TestMapperConfig(TEST_MAPPER_NAME, null, null, TestConfig("", TestEnums.ONE, "")) val configuredUsersStream = ConfiguredAirbyteStream( stream = @@ -353,7 +353,7 @@ class DestinationCatalogGeneratorTest { @Test fun `test generateDestinationCatalogFailedSchema`() { - val mapperConfig = TestMapperConfig(TEST_MAPPER_NAME, null, TestConfig("field1", TestEnums.ONE, "field2")) + val mapperConfig = TestMapperConfig(TEST_MAPPER_NAME, null, null, TestConfig("field1", TestEnums.ONE, "field2")) val configuredUsersStream = ConfiguredAirbyteStream( stream = @@ -424,8 +424,8 @@ class DestinationCatalogGeneratorTest { ), mappers = listOf( - TestMapperConfig(TEST_MAPPER_NAME, null, TestConfig("", TestEnums.ONE, "")), - TestMapperConfig(TEST_MAPPER_NAME, null, TestConfig("", TestEnums.TWO, "")), + TestMapperConfig(TEST_MAPPER_NAME, null, null, TestConfig("", TestEnums.ONE, "")), + TestMapperConfig(TEST_MAPPER_NAME, null, null, TestConfig("", TestEnums.TWO, "")), ), ) @@ -466,7 +466,7 @@ class DestinationCatalogGeneratorTest { Field(name = "field1_1", type = FieldType.STRING), Field(name = "field1_2", type = FieldType.DATE), ), - mappers = listOf(TestMapperConfig(TEST_MAPPER_NAME, null, TestConfig("", TestEnums.TWO, ""))), + mappers = listOf(TestMapperConfig(TEST_MAPPER_NAME, null, null, TestConfig("", TestEnums.TWO, ""))), ) val configuredUsersStream2 = @@ -491,7 +491,7 @@ class DestinationCatalogGeneratorTest { listOf( Field(name = "field2_1", type = FieldType.INTEGER), ), - mappers = listOf(TestMapperConfig(TEST_MAPPER_NAME, null, TestConfig("", TestEnums.TWO, ""))), + mappers = listOf(TestMapperConfig(TEST_MAPPER_NAME, null, null, TestConfig("", TestEnums.TWO, ""))), ) val catalog = ConfiguredAirbyteCatalog(streams = listOf(configuredUsersStream1, configuredUsersStream2)) diff --git a/airbyte-metrics/metrics-lib/build.gradle.kts b/airbyte-metrics/metrics-lib/build.gradle.kts index 391fab34bf7..8755dd45fc8 100644 --- 
a/airbyte-metrics/metrics-lib/build.gradle.kts +++ b/airbyte-metrics/metrics-lib/build.gradle.kts @@ -4,9 +4,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - ksp(libs.bundles.micronaut.annotation.processor) api(libs.bundles.micronaut.metrics) diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java index ff3f3571e07..aa69ecb9c0f 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/ApmTraceConstants.java @@ -46,6 +46,12 @@ public static final class Tags { */ public static final String ATTEMPT_NUMBER_KEY = "attempt_number"; + /** + * Name of the APM trace tag that holds the connector builder project id value associated with the + * trace. + */ + public static final String CONNECTOR_BUILDER_PROJECT_ID_KEY = "connector_builder_project_id"; + /** * Name of the APM trace tag that holds the destination Docker image value associated with the * trace. diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricClient.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricClient.java index 3008a3a4647..83a27147dd6 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricClient.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/DogStatsDMetricClient.java @@ -7,9 +7,11 @@ import com.google.common.annotations.VisibleForTesting; import com.timgroup.statsd.NonBlockingStatsDClientBuilder; import com.timgroup.statsd.StatsDClient; +import java.lang.invoke.MethodHandles; import java.util.stream.Collectors; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Light wrapper around the DogsStatsD client to make using the client slightly more ergonomic. @@ -26,9 +28,10 @@ * the {@code key} and {@code value} property of each {@link MetricAttribute} with a * {@link #TAG_DELIMITER} delimiter. */ -@Slf4j public class DogStatsDMetricClient implements MetricClient { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String TAG_DELIMITER = ":"; private boolean instancePublish = false; diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java index 958ba1f3542..bf0c61d2c29 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/MetricEmittingApps.java @@ -4,8 +4,6 @@ package io.airbyte.metrics.lib; -import lombok.AllArgsConstructor; - /** * Enum containing all applications metrics are emitted for. Used to initialize * MetricClientFactory.initialize(...). @@ -21,7 +19,6 @@ * Note: These names are used as metric name prefixes. Changing these names will affect * dashboard/alerts and our public Datadog integration. Please consult the platform teams if unsure. 
*/ -@AllArgsConstructor public enum MetricEmittingApps implements MetricEmittingApp { BILLING("billing"), @@ -38,6 +35,10 @@ public enum MetricEmittingApps implements MetricEmittingApp { private final String applicationName; + MetricEmittingApps(final String applicationName) { + this.applicationName = applicationName; + } + @Override public String getApplicationName() { return this.applicationName; diff --git a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java index e5429e9cc45..818e4bbecd2 100644 --- a/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java +++ b/airbyte-metrics/metrics-lib/src/main/java/io/airbyte/metrics/lib/OssMetricsRegistry.java @@ -257,6 +257,9 @@ public enum OssMetricsRegistry implements MetricsRegistry { "running_pods_found_for_connection_id", "whether we found pods running for a given connection id when attempting to start a sync for that connection id"), + REPLICATION_THROUGHPUT_BPS(MetricEmittingApps.WORKER, + "replication_throughput_bps", + "throughput of replication in bytes per second"), REPLICATION_BYTES_SYNCED(MetricEmittingApps.WORKER, "replication_bytes_synced", "number of bytes synced during replication"), diff --git a/airbyte-notification/build.gradle.kts b/airbyte-notification/build.gradle.kts index 4b6d1896110..19cfd16955a 100644 --- a/airbyte-notification/build.gradle.kts +++ b/airbyte-notification/build.gradle.kts @@ -4,9 +4,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - ksp(libs.bundles.micronaut.annotation.processor) implementation(project(":oss:airbyte-api:server-api")) diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailConfigFetcherImpl.java b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailConfigFetcherImpl.java index abfa646c86d..1fb3b712095 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailConfigFetcherImpl.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailConfigFetcherImpl.java @@ -9,10 +9,12 @@ import io.micronaut.context.annotation.Requires; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Fetch the configuration to send a notification using customerIo. 
@@ -20,9 +22,10 @@ @Singleton @Requires(property = "airbyte.notification.customerio.apikey", notEquals = "") -@Slf4j public class CustomerIoEmailConfigFetcherImpl implements CustomerIoEmailConfigFetcher { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AirbyteApiClient airbyteApiClient; public CustomerIoEmailConfigFetcherImpl(final AirbyteApiClient airbyteApiClient) { diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailNotificationSender.java b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailNotificationSender.java index c24af3aee26..d9a11f5e0a1 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailNotificationSender.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/CustomerIoEmailNotificationSender.java @@ -11,7 +11,7 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; -import lombok.extern.slf4j.Slf4j; +import java.lang.invoke.MethodHandles; import okhttp3.MediaType; import okhttp3.OkHttpClient; import okhttp3.Request; @@ -19,11 +19,12 @@ import okhttp3.Response; import org.apache.http.HttpHeaders; import org.jetbrains.annotations.NotNull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Send a notification using customerIo. */ -@Slf4j @Singleton @Requires(property = "airbyte.notification.customerio.apikey", notEquals = "") @@ -31,6 +32,8 @@ @SuppressWarnings({"PMD.ExceptionAsFlowControl", "PMD.ConfusingArgumentToVarargsMethod"}) public class CustomerIoEmailNotificationSender implements NotificationSender { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public static final MediaType JSON = MediaType.get("application/json; charset=utf-8"); private static final String CUSTOMER_IO_URL = "https://api.customer.io/v1/send/email"; diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/FakeCustomerIoEmailNotificationSender.java b/airbyte-notification/src/main/java/io/airbyte/notification/FakeCustomerIoEmailNotificationSender.java index d5f4beea51d..bc8dbd14bb5 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/FakeCustomerIoEmailNotificationSender.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/FakeCustomerIoEmailNotificationSender.java @@ -5,15 +5,18 @@ package io.airbyte.notification; import jakarta.inject.Singleton; -import lombok.extern.slf4j.Slf4j; +import java.lang.invoke.MethodHandles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Fake client for customerIoNotification - this will not send any actual emails. 
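CustomerIoEmailNotificationSender keeps its OkHttp client, the JSON media type, and the https://api.customer.io/v1/send/email endpoint; the request construction itself is outside this hunk. A rough sketch of what such a call typically looks like, assuming bearer authentication with the configured API key and an already-serialized JSON payload (both are assumptions, not taken from the patch):

import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import org.apache.http.HttpHeaders;

public class CustomerIoPostSketch {

  private static final MediaType JSON = MediaType.get("application/json; charset=utf-8");
  private static final String CUSTOMER_IO_URL = "https://api.customer.io/v1/send/email";

  static void send(final OkHttpClient client, final String apiKey, final String jsonPayload) throws Exception {
    final Request request = new Request.Builder()
        .url(CUSTOMER_IO_URL)
        .addHeader(HttpHeaders.AUTHORIZATION, "Bearer " + apiKey)
        .post(RequestBody.create(jsonPayload, JSON)) // OkHttp 4.x overload (String, MediaType)
        .build();
    try (Response response = client.newCall(request).execute()) {
      if (!response.isSuccessful()) {
        throw new RuntimeException("customer.io request failed with HTTP " + response.code());
      }
    }
  }

}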
*/ -@Slf4j @Singleton public class FakeCustomerIoEmailNotificationSender extends CustomerIoEmailNotificationSender { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + FakeCustomerIoEmailNotificationSender() { super(null, null); } diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/WorkspaceNotificationConfigFetcher.java b/airbyte-notification/src/main/java/io/airbyte/notification/WorkspaceNotificationConfigFetcher.java index e3ca541daf5..253a9a19a53 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/WorkspaceNotificationConfigFetcher.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/WorkspaceNotificationConfigFetcher.java @@ -10,16 +10,19 @@ import io.airbyte.api.client.model.generated.WorkspaceRead; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Fetching notification settings from workspace. */ @Singleton -@Slf4j public class WorkspaceNotificationConfigFetcher { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AirbyteApiClient airbyteApiClient; public WorkspaceNotificationConfigFetcher(final AirbyteApiClient airbyteApiClient) { diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/messages/ConnectionInfo.java b/airbyte-notification/src/main/java/io/airbyte/notification/messages/ConnectionInfo.java deleted file mode 100644 index 9d619e6ba66..00000000000 --- a/airbyte-notification/src/main/java/io/airbyte/notification/messages/ConnectionInfo.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.notification.messages; - -import java.util.UUID; -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -@Getter -@Setter -@Builder -public class ConnectionInfo { - - private UUID id; - private String name; - private String url; - -} diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/messages/DestinationInfo.java b/airbyte-notification/src/main/java/io/airbyte/notification/messages/DestinationInfo.java deleted file mode 100644 index 609f218efcc..00000000000 --- a/airbyte-notification/src/main/java/io/airbyte/notification/messages/DestinationInfo.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.notification.messages; - -import java.util.UUID; -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -@Getter -@Setter -@Builder -public class DestinationInfo { - - private UUID id; - private String name; - private String url; - -} diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/messages/SchemaUpdateNotification.java b/airbyte-notification/src/main/java/io/airbyte/notification/messages/SchemaUpdateNotification.java deleted file mode 100644 index af79e44b2c3..00000000000 --- a/airbyte-notification/src/main/java/io/airbyte/notification/messages/SchemaUpdateNotification.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.notification.messages; - -import io.airbyte.api.model.generated.CatalogDiff; -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -@Getter -@Setter -@Builder -public class SchemaUpdateNotification { - - private WorkspaceInfo workspace; - - private ConnectionInfo connectionInfo; - - private SourceInfo sourceInfo; - - private boolean isBreakingChange; - - private CatalogDiff catalogDiff; - -} diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/messages/SourceInfo.java b/airbyte-notification/src/main/java/io/airbyte/notification/messages/SourceInfo.java deleted file mode 100644 index 6c46a0c048c..00000000000 --- a/airbyte-notification/src/main/java/io/airbyte/notification/messages/SourceInfo.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.notification.messages; - -import java.util.UUID; -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -@Getter -@Setter -@Builder -public class SourceInfo { - - private UUID id; - private String name; - private String url; - -} diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/messages/SyncSummary.java b/airbyte-notification/src/main/java/io/airbyte/notification/messages/SyncSummary.java deleted file mode 100644 index 6a7acfa21a7..00000000000 --- a/airbyte-notification/src/main/java/io/airbyte/notification/messages/SyncSummary.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.notification.messages; - -import java.time.Duration; -import java.time.Instant; -import java.util.List; -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -@Getter -@Setter -@Builder -public class SyncSummary { - - private WorkspaceInfo workspace; - - private ConnectionInfo connection; - - private SourceInfo source; - - private DestinationInfo destination; - - private Long jobId; - - private boolean isSuccess; - - private Instant startedAt; - - private Instant finishedAt; - - private long bytesEmitted; - - private long bytesCommitted; - - private long recordsEmitted; - - private long recordsCommitted; - - private long recordsFilteredOut; - - private long bytesFilteredOut; - - private String errorMessage; - - private static String formatVolume(final long bytes) { - long currentValue = bytes; - for (String unit : List.of("B", "kB", "MB", "GB")) { - var byteLimit = 1024; - if (currentValue < byteLimit) { - return String.format("%d %s", currentValue, unit); - } - currentValue = currentValue / byteLimit; - } - return String.format("%d TB", currentValue); - } - - private static String formatDuration(final Instant start, final Instant end) { - Duration duration = Duration.between(start, end); - if (duration.toMinutes() == 0) { - return String.format("%d sec", duration.toSecondsPart()); - } else if (duration.toHours() == 0) { - return String.format("%d min %d sec", duration.toMinutesPart(), duration.toSecondsPart()); - } else if (duration.toDays() == 0) { - return String.format("%d hours %d min", duration.toHoursPart(), duration.toMinutesPart()); - } - return String.format("%d days %d hours", duration.toDays(), duration.toHoursPart()); - } - - public Long getDurationInSeconds() { - if (startedAt != null && finishedAt != null) { - return Duration.between(startedAt, finishedAt).getSeconds(); - } - return null; - } - - public String getDurationFormatted() { - if (startedAt != null && finishedAt != null) { - 
return formatDuration(startedAt, finishedAt); - } - return null; - } - - public String getBytesEmittedFormatted() { - return formatVolume(bytesEmitted); - } - - public String getBytesCommittedFormatted() { - return formatVolume(bytesCommitted); - } - -} diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/messages/WorkspaceInfo.java b/airbyte-notification/src/main/java/io/airbyte/notification/messages/WorkspaceInfo.java deleted file mode 100644 index c89643f6168..00000000000 --- a/airbyte-notification/src/main/java/io/airbyte/notification/messages/WorkspaceInfo.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.notification.messages; - -import java.util.UUID; -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -@Getter -@Setter -@Builder -public class WorkspaceInfo { - - private UUID id; - private String name; - private String url; - -} diff --git a/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/ConnectionInfo.kt b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/ConnectionInfo.kt new file mode 100644 index 00000000000..143cef4193a --- /dev/null +++ b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/ConnectionInfo.kt @@ -0,0 +1,9 @@ +package io.airbyte.notification.messages + +import java.util.UUID + +data class ConnectionInfo( + val id: UUID? = null, + val name: String? = null, + val url: String? = null, +) diff --git a/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/DestinationInfo.kt b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/DestinationInfo.kt new file mode 100644 index 00000000000..91a75dcc75b --- /dev/null +++ b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/DestinationInfo.kt @@ -0,0 +1,9 @@ +package io.airbyte.notification.messages + +import java.util.UUID + +data class DestinationInfo( + val id: UUID?, + val name: String?, + val url: String?, +) diff --git a/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SchemaUpdateNotification.kt b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SchemaUpdateNotification.kt new file mode 100644 index 00000000000..896b59d413a --- /dev/null +++ b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SchemaUpdateNotification.kt @@ -0,0 +1,11 @@ +package io.airbyte.notification.messages + +import io.airbyte.api.model.generated.CatalogDiff + +data class SchemaUpdateNotification( + val workspace: WorkspaceInfo, + val connectionInfo: ConnectionInfo, + val sourceInfo: SourceInfo, + val isBreakingChange: Boolean, + val catalogDiff: CatalogDiff, +) diff --git a/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SourceInfo.kt b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SourceInfo.kt new file mode 100644 index 00000000000..83d787b5bcd --- /dev/null +++ b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SourceInfo.kt @@ -0,0 +1,9 @@ +package io.airbyte.notification.messages + +import java.util.UUID + +data class SourceInfo( + val id: UUID?, + val name: String?, + val url: String?, +) diff --git a/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SyncSummary.kt b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SyncSummary.kt new file mode 100644 index 00000000000..e124d64d145 --- /dev/null +++ 
b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/SyncSummary.kt @@ -0,0 +1,64 @@ +package io.airbyte.notification.messages + +import java.time.Duration +import java.time.Instant + +data class SyncSummary( + val workspace: WorkspaceInfo, + val connection: ConnectionInfo, + val source: SourceInfo, + val destination: DestinationInfo, + val jobId: Long, + val isSuccess: Boolean, + val startedAt: Instant?, + val finishedAt: Instant?, + val bytesEmitted: Long, + val bytesCommitted: Long, + val recordsEmitted: Long, + val recordsCommitted: Long, + val recordsFilteredOut: Long = 0, + val bytesFilteredOut: Long = 0, + val errorMessage: String? = null, +) { + fun getDurationInSeconds(): Long? = + when { + startedAt != null && finishedAt != null -> Duration.between(startedAt, finishedAt).seconds + else -> null + } + + fun getDurationFormatted(): String? = + when { + startedAt != null && finishedAt != null -> formatDuration(startedAt, finishedAt) + else -> null + } + + fun getBytesEmittedFormatted() = formatVolume(bytesEmitted) + + fun getBytesCommittedFormatted() = formatVolume(bytesCommitted) +} + +private fun formatDuration( + start: Instant, + end: Instant, +): String { + val duration = Duration.between(start, end) + return when { + duration.toMinutes() == 0L -> "${duration.toSecondsPart()} sec" + duration.toHours() == 0L -> "${duration.toMinutesPart()} min ${duration.toSecondsPart()} sec" + duration.toDays() == 0L -> "${duration.toHoursPart()} hours ${duration.toMinutesPart()} min" + else -> "${duration.toDays()} days ${duration.toHours()} hours" + } +} + +private fun formatVolume(bytes: Long): String { + var currentValue = bytes + val byteLimit = 1024 + listOf("B", "kB", "MB", "GB").forEach { unit -> + if (currentValue < byteLimit) { + return "$currentValue $unit" + } + currentValue /= byteLimit + } + + return "$currentValue TB" +} diff --git a/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/WorkspaceInfo.kt b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/WorkspaceInfo.kt new file mode 100644 index 00000000000..9869e2205b6 --- /dev/null +++ b/airbyte-notification/src/main/kotlin/io/airbyte/notification/messages/WorkspaceInfo.kt @@ -0,0 +1,9 @@ +package io.airbyte.notification.messages + +import java.util.UUID + +data class WorkspaceInfo( + val id: UUID?, + val name: String?, + val url: String?, +) diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/CustomerIoEmailConfigFetcherTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/CustomerIoEmailConfigFetcherTest.java index 9af457b7886..027fb2d0712 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/CustomerIoEmailConfigFetcherTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/CustomerIoEmailConfigFetcherTest.java @@ -40,7 +40,7 @@ void testReturnTheRightConfig() throws IOException { final String email = "em@il.com"; when(workspaceApi.getWorkspaceByConnectionId(new ConnectionIdRequestBody(connectionId))) .thenReturn(new WorkspaceRead(UUID.randomUUID(), UUID.randomUUID(), "name", "slug", true, UUID.randomUUID(), email, null, null, null, null, - null, null, null, null, null, null, null)); + null, null, null, null, null, null, null, null)); CustomerIoEmailConfig customerIoEmailConfig = cloudCustomerIoEmailConfigFetcher.fetchConfig(connectionId); assertEquals(email, customerIoEmailConfig.getTo()); diff --git 
a/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java index 975990e3522..105dff148b5 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/CustomerioNotificationClientTest.java @@ -178,20 +178,22 @@ void testNotifyBreakingChangeSyncsDisabled() throws IOException, InterruptedExce void testNotifyConnectionDisabled() throws IOException, InterruptedException { mockWebServer.enqueue(new MockResponse()); - SyncSummary summary = SyncSummary.builder() - .workspace(WorkspaceInfo.builder().id(WORKSPACE_ID).build()) - .destination(DestinationInfo.builder().name(RANDOM_INPUT).build()) - .source(SourceInfo.builder().name(RANDOM_INPUT).build()) - .connection(ConnectionInfo.builder().id(CONNECTION_ID).build()) - .startedAt(Instant.ofEpochSecond(1000000)) - .finishedAt(Instant.ofEpochSecond(2000000)) - .isSuccess(false) - .bytesEmitted(123240L) - .bytesCommitted(9000L) - .recordsEmitted(780) - .recordsCommitted(600) - .errorMessage(RANDOM_INPUT) - .build(); + SyncSummary summary = new SyncSummary( + new WorkspaceInfo(WORKSPACE_ID, null, null), + new ConnectionInfo(CONNECTION_ID, null, null), + new SourceInfo(null, RANDOM_INPUT, null), + new DestinationInfo(null, RANDOM_INPUT, null), + 10L, + false, + Instant.ofEpochSecond(1000000), + Instant.ofEpochSecond(2000000), + 123240L, + 9000L, + 780, + 600, + 0, + 0, + RANDOM_INPUT); final boolean result = customerioNotificationClient.notifyConnectionDisabled(summary, WORKSPACE.getEmail()); @@ -231,12 +233,13 @@ void testBuildSchemaNotificationMessageData() { .streamDescriptor(new StreamDescriptor().name("stream_with_added_pk"))); String recipient = "airbyte@airbyte.io"; String transactionMessageId = "455"; - SchemaUpdateNotification notification = SchemaUpdateNotification.builder() - .workspace(WorkspaceInfo.builder().id(workspaceId).name(workspaceName).build()) - .connectionInfo(ConnectionInfo.builder().id(connectionId).name(connectionName).build()) - .sourceInfo(SourceInfo.builder().id(sourceId).name(sourceName).build()) - .catalogDiff(diff) - .build(); + SchemaUpdateNotification notification = new SchemaUpdateNotification( + new WorkspaceInfo(workspaceId, workspaceName, null), + new ConnectionInfo(connectionId, connectionName, null), + new SourceInfo(sourceId, sourceName, null), + false, + diff); + ObjectNode node = CustomerioNotificationClient.buildSchemaChangeJson(notification, recipient, transactionMessageId); @@ -267,23 +270,22 @@ void testBuildJobSuccessNotificationMessageData() throws IOException { Instant startedAt = Instant.ofEpochSecond(1000000); Instant finishedAt = Instant.ofEpochSecond(1070000); - SyncSummary syncSummary = SyncSummary.builder() - .workspace(WorkspaceInfo.builder().id(workspaceId).name(workspaceName).url("http://workspace").build()) - .source(SourceInfo.builder().id(sourceId).name(sourceName).url("http://source").build()) - .destination(DestinationInfo.builder().id(destinationId).name(destinationName).url("http://source").build()) - .connection(ConnectionInfo.builder().id(connectionId).name(connectionName).url("http://connection").build()) - .jobId(100L) - .isSuccess(false) - .errorMessage("Connection to the source failed") - .startedAt(startedAt) - .finishedAt(finishedAt) - .bytesEmitted(1000L) - .bytesCommitted(9000L) - .recordsFilteredOut(0L) - .bytesFilteredOut(0L) - 
.recordsEmitted(50) - .recordsCommitted(48) - .build(); + SyncSummary syncSummary = new SyncSummary( + new WorkspaceInfo(workspaceId, workspaceName, "http://workspace"), + new ConnectionInfo(connectionId, connectionName, "http://connection"), + new SourceInfo(sourceId, sourceName, "http://source"), + new DestinationInfo(destinationId, destinationName, "http://source"), + 100L, + false, + startedAt, + finishedAt, + 1000L, + 9000L, + 50, + 48, + 0L, + 0L, + "Connection to the source failed"); String email = "joe@foobar.com"; String transactionId = "201"; @@ -298,6 +300,7 @@ void testBuildJobSuccessNotificationMessageData() throws IOException { JsonNode expected = mapper.readTree(jsonContent); ObjectNode node = CustomerioNotificationClient.buildSyncCompletedJson(syncSummary, email, transactionId); assertEquals(expected.get("message_data").get("bytesEmitted"), node.get("message_data").get("bytesEmitted")); + assertEquals(expected.size(), node.size()); assertEquals(expected, node); } diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java index e54fb6e0fd0..5a669335619 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/SlackNotificationClientTest.java @@ -86,19 +86,17 @@ void tearDown() { void testBadWebhookUrl() { final SlackNotificationClient client = new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + "/bad")); - final SyncSummary summary = SyncSummary.builder() - .connection(ConnectionInfo.builder() - .name(CONNECTION_NAME).id(UUID.randomUUID()).url(LOG_URL).build()) - .source(SourceInfo.builder() - .name(SOURCE_TEST).id(UUID.randomUUID()).url("http://source").build()) - .destination(DestinationInfo.builder() - .name(DESTINATION_TEST).id(UUID.randomUUID()).url("http://destination").build()) - .errorMessage("") - .jobId(JOB_ID) - .isSuccess(true) - .startedAt(Instant.MIN) - .finishedAt(Instant.MAX) - .build(); + final SyncSummary summary = new SyncSummary( + new WorkspaceInfo(null, null, null), + new ConnectionInfo(UUID.randomUUID(), CONNECTION_NAME, LOG_URL), + new SourceInfo(UUID.randomUUID(), SOURCE_TEST, "http://source"), + new DestinationInfo(UUID.randomUUID(), DESTINATION_TEST, "http://destination"), + JOB_ID, + true, + Instant.MIN, + Instant.MAX, + 0, 0, 0, 0, 0, 0, + ""); assertFalse(client.notifyJobFailure(summary, null)); } @@ -106,32 +104,32 @@ void testBadWebhookUrl() { void testEmptyWebhookUrl() throws IOException, InterruptedException { final SlackNotificationClient client = new SlackNotificationClient(new SlackNotificationConfiguration()); - final SyncSummary summary = SyncSummary.builder() - .connection(ConnectionInfo.builder() - .name(CONNECTION_NAME).id(UUID.randomUUID()).url(LOG_URL).build()) - .source(SourceInfo.builder() - .name(SOURCE_TEST).id(UUID.randomUUID()).url("http://source").build()) - .destination(DestinationInfo.builder() - .name(DESTINATION_TEST).id(UUID.randomUUID()).url("http://destination").build()) - .errorMessage("Job timed out") - .jobId(JOB_ID) - .build(); + final SyncSummary summary = new SyncSummary( + new WorkspaceInfo(null, null, null), + new ConnectionInfo(UUID.randomUUID(), CONNECTION_NAME, LOG_URL), + new SourceInfo(UUID.randomUUID(), SOURCE_TEST, "http://source"), + new DestinationInfo(UUID.randomUUID(), 
DESTINATION_TEST, "http://destination"), + JOB_ID, + false, + null, null, + 0, 0, 0, 0, 0, 0, + JOB_DESCRIPTION); assertFalse(client.notifyJobFailure(summary, null)); } @Test void testNotifyJobFailure() throws IOException, InterruptedException { server.createContext(TEST_PATH, new ServerHandler(EXPECTED_FAIL_MESSAGE)); - final SyncSummary summary = SyncSummary.builder() - .connection(ConnectionInfo.builder() - .name(CONNECTION_NAME).id(UUID.randomUUID()).url(LOG_URL).build()) - .source(SourceInfo.builder() - .name(SOURCE_TEST).id(UUID.randomUUID()).url("http://source").build()) - .destination(DestinationInfo.builder() - .name(DESTINATION_TEST).id(UUID.randomUUID()).url("http://destination").build()) - .errorMessage(JOB_DESCRIPTION) - .jobId(JOB_ID) - .build(); + final SyncSummary summary = new SyncSummary( + new WorkspaceInfo(null, null, null), + new ConnectionInfo(UUID.randomUUID(), CONNECTION_NAME, LOG_URL), + new SourceInfo(UUID.randomUUID(), SOURCE_TEST, "http://source"), + new DestinationInfo(UUID.randomUUID(), DESTINATION_TEST, "http://destination"), + JOB_ID, + false, + null, null, + 0, 0, 0, 0, 0, 0, + JOB_DESCRIPTION); final SlackNotificationClient client = new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); assertTrue(client.notifyJobFailure(summary, null)); @@ -140,16 +138,16 @@ void testNotifyJobFailure() throws IOException, InterruptedException { @Test void testNotifyJobSuccess() throws IOException, InterruptedException { server.createContext(TEST_PATH, new ServerHandler(EXPECTED_SUCCESS_MESSAGE)); - final SyncSummary summary = SyncSummary.builder() - .connection(ConnectionInfo.builder() - .name(CONNECTION_NAME).id(UUID.randomUUID()).url(LOG_URL).build()) - .source(SourceInfo.builder() - .name(SOURCE_TEST).id(UUID.randomUUID()).url("http://source").build()) - .destination(DestinationInfo.builder() - .name(DESTINATION_TEST).id(UUID.randomUUID()).url("http://destination").build()) - .errorMessage(JOB_DESCRIPTION) - .jobId(JOB_ID) - .build(); + final SyncSummary summary = new SyncSummary( + new WorkspaceInfo(null, null, null), + new ConnectionInfo(UUID.randomUUID(), CONNECTION_NAME, LOG_URL), + new SourceInfo(UUID.randomUUID(), SOURCE_TEST, "http://source"), + new DestinationInfo(UUID.randomUUID(), DESTINATION_TEST, "http://destination"), + JOB_ID, + false, + null, null, + 0, 0, 0, 0, 0, 0, + JOB_DESCRIPTION); final SlackNotificationClient client = new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); assertTrue(client.notifyJobSuccess(summary, null)); @@ -172,13 +170,17 @@ void testNotifyConnectionDisabled() throws IOException, InterruptedException { server.createContext(TEST_PATH, new ServerHandler(expectedNotificationMessage)); final SlackNotificationClient client = new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); - final SyncSummary summary = SyncSummary.builder() - .workspace(WorkspaceInfo.builder().id(WORKSPACE_ID).build()) - .destination(DestinationInfo.builder().name(DESTINATION_TEST).build()) - .source(SourceInfo.builder().name(SOURCE_TEST).build()) - .connection(ConnectionInfo.builder().id(CONNECTION_ID).name(CONNECTION_NAME).url("http://connection").build()) - .errorMessage("job description.") - .build(); + final SyncSummary summary = new SyncSummary( + new WorkspaceInfo(WORKSPACE_ID, null, null), + new 
ConnectionInfo(CONNECTION_ID, CONNECTION_NAME, "http://connection"), + new SourceInfo(null, SOURCE_TEST, null), + new DestinationInfo(null, DESTINATION_TEST, null), + 0, + false, + null, + null, + 0, 0, 0, 0, 0, 0, + "job description."); assertTrue(client.notifyConnectionDisabled(summary, "")); } @@ -199,13 +201,18 @@ void testNotifyConnectionDisabledWarning() throws IOException, InterruptedExcept server.createContext(TEST_PATH, new ServerHandler(expectedNotificationWarningMessage)); final SlackNotificationClient client = new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); - final SyncSummary summary = SyncSummary.builder() - .workspace(WorkspaceInfo.builder().id(WORKSPACE_ID).build()) - .destination(DestinationInfo.builder().name(DESTINATION_TEST).build()) - .source(SourceInfo.builder().name(SOURCE_TEST).build()) - .connection(ConnectionInfo.builder().id(CONNECTION_ID).name(CONNECTION_NAME).url("http://connection").build()) - .errorMessage("job description.") - .build(); + final SyncSummary summary = new SyncSummary( + new WorkspaceInfo(WORKSPACE_ID, null, null), + new ConnectionInfo(CONNECTION_ID, CONNECTION_NAME, "http://connection"), + new SourceInfo(null, SOURCE_TEST, null), + new DestinationInfo(null, DESTINATION_TEST, null), + 0L, + false, + null, + null, + 0, 0, 0, 0, 0, 0, + "job description."); + assertTrue(client.notifyConnectionDisableWarning(summary, "")); } @@ -229,12 +236,12 @@ void testNotifySchemaPropagated() throws IOException, InterruptedException { new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); final UUID workpaceId = UUID.randomUUID(); - final SchemaUpdateNotification notification = SchemaUpdateNotification.builder() - .connectionInfo(ConnectionInfo.builder().name(connectionName).id(connectionId).url(connectionUrl).build()) - .workspace(WorkspaceInfo.builder().name(workspaceName).id(workpaceId).url(workspaceUrl).build()) - .catalogDiff(diff) - .isBreakingChange(isBreaking) - .sourceInfo(SourceInfo.builder().name(sourceName).id(sourceId).url(sourceUrl).build()).build(); + final SchemaUpdateNotification notification = new SchemaUpdateNotification( + new WorkspaceInfo(workpaceId, workspaceName, workspaceUrl), + new ConnectionInfo(connectionId, connectionName, connectionUrl), + new SourceInfo(sourceId, sourceName, sourceUrl), + isBreaking, + diff); assertTrue( client.notifySchemaPropagated(notification, recipient)); @@ -260,14 +267,14 @@ void testNotifySchemaDiffToApply() { new SlackNotificationClient(new SlackNotificationConfiguration().withWebhook(WEBHOOK_URL + server.getAddress().getPort() + TEST_PATH)); final UUID workpaceId = UUID.randomUUID(); - final SchemaUpdateNotification notification = SchemaUpdateNotification.builder() - .connectionInfo(ConnectionInfo.builder().name(connectionName).id(connectionId).url(connectionUrl).build()) - .workspace(WorkspaceInfo.builder().name(workspaceName).id(workpaceId).url(workspaceUrl).build()) - .catalogDiff(diff) - .isBreakingChange(isBreaking) - .sourceInfo(SourceInfo.builder().name(sourceName).id(sourceId).url(sourceUrl).build()).build(); - assertTrue( - client.notifySchemaDiffToApply(notification, recipient)); + final SchemaUpdateNotification notification = new SchemaUpdateNotification( + new WorkspaceInfo(workpaceId, workspaceName, workspaceUrl), + new ConnectionInfo(connectionId, connectionName, connectionUrl), + new SourceInfo(sourceId, sourceName, 
sourceUrl), + isBreaking, + diff); + + assertTrue(client.notifySchemaDiffToApply(notification, recipient)); } @Test diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/WorkspaceNotificationConfigFetcherTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/WorkspaceNotificationConfigFetcherTest.java index 130f12e83f0..e4ef9716fe2 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/WorkspaceNotificationConfigFetcherTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/WorkspaceNotificationConfigFetcherTest.java @@ -44,7 +44,7 @@ void testReturnTheRightConfig() throws IOException { when(workspaceApi.getWorkspaceByConnectionId(new ConnectionIdRequestBody(connectionId))) .thenReturn( new WorkspaceRead(UUID.randomUUID(), UUID.randomUUID(), "name", "slug", true, UUID.randomUUID(), email, null, null, null, null, null, - new NotificationSettings(null, null, null, null, null, notificationItem, null, null), null, null, null, null, null)); + new NotificationSettings(null, null, null, null, null, notificationItem, null, null), null, null, null, null, null, null)); NotificationItemWithCustomerIoConfig result = workspaceNotificationConfigFetcher.fetchNotificationConfig(connectionId, NotificationEvent.ON_BREAKING_CHANGE); diff --git a/airbyte-notification/src/test/java/io/airbyte/notification/slack/NotificationTest.java b/airbyte-notification/src/test/java/io/airbyte/notification/slack/NotificationTest.java index 93008dfc8aa..e577d08b56c 100644 --- a/airbyte-notification/src/test/java/io/airbyte/notification/slack/NotificationTest.java +++ b/airbyte-notification/src/test/java/io/airbyte/notification/slack/NotificationTest.java @@ -92,21 +92,22 @@ void testDataNode() throws JsonProcessingException { UUID destinationId = UUID.fromString("5621c38f-8048-4abb-85ca-b34ff8d9a298"); long jobId = 9988L; - SyncSummary syncSummary = SyncSummary.builder() - .workspace(WorkspaceInfo.builder().name("Workspace1").id(workspaceId).url("https://link/to/ws").build()) - .connection(ConnectionInfo.builder().name("Connection").id(connectionId).url("https://link/to/connection").build()) - .source(SourceInfo.builder().name("Source").id(sourceId).url("https://link/to/source").build()) - .destination(DestinationInfo.builder().name("Destination").id(destinationId).url("https://link/to/destination").build()) - .errorMessage("Something failed") - .jobId(jobId) - .isSuccess(false) - .startedAt(Instant.ofEpochSecond(1704067200)) - .finishedAt(Instant.ofEpochSecond(1704070800)) - .bytesEmitted(1000L) - .bytesCommitted(90L) - .recordsEmitted(89L) - .recordsCommitted(45L) - .build(); + SyncSummary syncSummary = new SyncSummary( + new WorkspaceInfo(workspaceId, "Workspace1", "https://link/to/ws"), + new ConnectionInfo(connectionId, "Connection", "https://link/to/connection"), + new SourceInfo(sourceId, "Source", "https://link/to/source"), + new DestinationInfo(destinationId, "Destination", "https://link/to/destination"), + jobId, + false, + Instant.ofEpochSecond(1704067200), + Instant.ofEpochSecond(1704070800), + 1000L, + 90L, + 89L, + 45L, + 0, + 0, + "Something failed"); Notification notification = new Notification(); notification.setData(syncSummary); diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuth2Flow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuth2Flow.java index fa32006a237..151b204b0f8 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuth2Flow.java +++ 
b/airbyte-oauth/src/main/java/io/airbyte/oauth/BaseOAuth2Flow.java @@ -99,6 +99,30 @@ public BaseOAuth2Flow(final HttpClient httpClient, this.tokenReqContentType = tokenReqContentType; } + /** + * Retrieves the content type to be used for the token request. + * + * @param inputOAuthConfiguration the OAuth configuration as a JsonNode + * @return the content type for the token request, which is URL_ENCODED by default + */ + protected TokenRequestContentType getRequestContentType(final JsonNode inputOAuthConfiguration) { + return TokenRequestContentType.URL_ENCODED; + } + + /** + * Generates the consent URL for OAuth2 authentication for a given source. + * + * @param workspaceId the UUID of the workspace + * @param sourceDefinitionId the UUID of the source definition + * @param redirectUrl the URL to redirect to after authentication + * @param inputOAuthConfiguration the input OAuth configuration as a JsonNode + * @param oauthConfigSpecification the OAuth configuration specification + * @param sourceOAuthParamConfig the source OAuth parameter configuration as a JsonNode + * @return the formatted consent URL as a String + * @throws IOException if an I/O error occurs + * @throws JsonValidationException if the input OAuth configuration is invalid + * @throws ResourceNotFoundProblem if the source OAuth parameter configuration is null + */ @Override public String getSourceConsentUrl(final UUID workspaceId, final UUID sourceDefinitionId, @@ -122,6 +146,20 @@ public String getSourceConsentUrl(final UUID workspaceId, Jsons.mergeNodes(inputOAuthConfiguration, getOAuthDeclarativeInputSpec(oauthConfigSpecification))); } + /** + * Generates the consent URL for OAuth2 authorization for a destination. + * + * @param workspaceId the ID of the workspace requesting the consent URL + * @param destinationDefinitionId the ID of the destination definition + * @param redirectUrl the URL to redirect to after authorization + * @param inputOAuthConfiguration the OAuth configuration input provided by the user + * @param oauthConfigSpecification the specification for the OAuth configuration + * @param destinationOAuthParamConfig the OAuth parameters configuration for the destination + * @return the formatted consent URL for OAuth2 authorization + * @throws IOException if an I/O error occurs + * @throws JsonValidationException if the input OAuth configuration is invalid + * @throws ResourceNotFoundProblem if the destination OAuth parameter configuration is not found + */ @Override public String getDestinationConsentUrl(final UUID workspaceId, final UUID destinationDefinitionId, @@ -203,6 +241,22 @@ public Map completeSourceOAuth(final UUID workspaceId, } + /** + * Completes the OAuth2 flow for a source by validating the input OAuth configuration, handling any + * ignored OAuth errors, merging the input configuration with the declarative input specification, + * and formatting the OAuth output. 
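The new getRequestContentType hook lets completeOAuthFlow decide, per input configuration, whether the token request body is form-encoded (the URL_ENCODED default) or JSON. A small, self-contained illustration of the difference the hook controls, using placeholder parameter values:

import com.fasterxml.jackson.databind.ObjectMapper;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class TokenRequestBodySketch {

  public static void main(final String[] args) throws Exception {
    final Map<String, String> params = new LinkedHashMap<>();
    params.put("client_id", "my-client-id");              // placeholder values
    params.put("client_secret", "my-client-secret");
    params.put("code", "auth-code-from-redirect");
    params.put("redirect_uri", "https://example.com/callback");

    // URL_ENCODED: the classic application/x-www-form-urlencoded body.
    final String formBody = params.entrySet().stream()
        .map(e -> URLEncoder.encode(e.getKey(), StandardCharsets.UTF_8) + "="
            + URLEncoder.encode(e.getValue(), StandardCharsets.UTF_8))
        .collect(Collectors.joining("&"));

    // JSON: the same parameters serialized as a JSON object.
    final String jsonBody = new ObjectMapper().writeValueAsString(params);

    System.out.println(formBody);
    System.out.println(jsonBody);
  }

}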
+ * + * @param workspaceId the ID of the workspace + * @param sourceDefinitionId the ID of the source definition + * @param queryParams the query parameters from the OAuth callback + * @param redirectUrl the redirect URL used in the OAuth flow + * @param inputOAuthConfiguration the input OAuth configuration + * @param oauthConfigSpecification the OAuth configuration specification + * @param oauthParamConfig the OAuth parameter configuration + * @return a map containing the formatted OAuth output + * @throws IOException if an I/O error occurs during the OAuth flow + * @throws JsonValidationException if the input OAuth configuration is invalid + */ @Override public Map completeSourceOAuth(final UUID workspaceId, final UUID sourceDefinitionId, @@ -257,6 +311,22 @@ public Map completeDestinationOAuth(final UUID workspaceId, } + /** + * Completes the OAuth flow for a destination by validating the input configuration, handling any + * OAuth errors, merging the input configuration with the declarative input specification, and + * formatting the OAuth output. + * + * @param workspaceId the ID of the workspace + * @param destinationDefinitionId the ID of the destination definition + * @param queryParams the query parameters from the OAuth callback + * @param redirectUrl the redirect URL used in the OAuth flow + * @param inputOAuthConfiguration the input OAuth configuration + * @param oauthConfigSpecification the OAuth configuration specification + * @param oauthParamConfig the OAuth parameter configuration + * @return a map containing the formatted OAuth output + * @throws IOException if an I/O error occurs during the OAuth flow + * @throws JsonValidationException if the input OAuth configuration is invalid + */ @Override public Map completeDestinationOAuth(final UUID workspaceId, final UUID destinationDefinitionId, @@ -363,9 +433,9 @@ protected Map completeOAuthFlow(final String clientId, return getCompleteOAuthFlowOutput( formatAccessTokenUrl( getAccessTokenUrl(inputOAuthConfiguration), clientId, clientSecret, authCode, redirectUrl, inputOAuthConfiguration, state), - getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl, inputOAuthConfiguration), + getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl, state, inputOAuthConfiguration), getCompleteOAuthFlowRequestHeaders(clientId, clientSecret, authCode, redirectUrl, inputOAuthConfiguration), - tokenReqContentType, + getRequestContentType(inputOAuthConfiguration), inputOAuthConfiguration); } @@ -398,6 +468,7 @@ protected Map getAccessTokenQueryParameters(final String clientI final String clientSecret, final String authCode, final String redirectUrl, + final String state, final JsonNode inputOAuthConfiguration) { return getAccessTokenQueryParameters(clientId, clientSecret, authCode, redirectUrl); } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java index 711b56967c2..3eeb453116b 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/OAuthImplementationFactory.java @@ -157,11 +157,27 @@ private static boolean hasDeclarativeOAuthConfigSpecification(final ConnectorSpe * @return OAuthFlowImplementation */ public OAuthFlowImplementation create(final String imageName, final ConnectorSpecification connectorSpecification) { - if (hasDeclarativeOAuthConfigSpecification(connectorSpecification)) { - 
LOGGER.info("Using DeclarativeOAuthFlow for {}", imageName); - return new DeclarativeOAuthFlow(httpClient); + try { + return createDeclarativeOAuthImplementation(connectorSpecification); + } catch (final IllegalStateException e) { + return createNonDeclarativeOAuthImplementation(imageName); } + } + + /** + * Creates a DeclarativeOAuthFlow for a given connector spec. + * + * @param connectorSpecification - the spec for the connector + * @return DeclarativeOAuthFlow + */ + public DeclarativeOAuthFlow createDeclarativeOAuthImplementation(final ConnectorSpecification connectorSpecification) { + if (!hasDeclarativeOAuthConfigSpecification(connectorSpecification)) { + throw new IllegalStateException("Cannot create DeclarativeOAuthFlow without a declarative OAuth config spec."); + } + return new DeclarativeOAuthFlow(httpClient); + } + private OAuthFlowImplementation createNonDeclarativeOAuthImplementation(final String imageName) { if (oauthFlowMapping.containsKey(imageName)) { LOGGER.info("Using {} for {}", oauthFlowMapping.get(imageName).getClass().getSimpleName(), imageName); return oauthFlowMapping.get(imageName); diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/declarative/DeclarativeOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/declarative/DeclarativeOAuthFlow.java index 453a93fd54f..7971375bb2c 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/declarative/DeclarativeOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/declarative/DeclarativeOAuthFlow.java @@ -6,11 +6,16 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; +import io.airbyte.api.problems.model.generated.ProblemResourceData; +import io.airbyte.api.problems.throwable.generated.ResourceNotFoundProblem; +import io.airbyte.commons.json.Jsons; +import io.airbyte.config.ConfigSchema; import io.airbyte.oauth.BaseOAuth2Flow; +import io.airbyte.protocol.models.OAuthConfigSpecification; +import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.net.URISyntaxException; import java.net.http.HttpClient; -import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.function.Supplier; @@ -29,6 +34,30 @@ public DeclarativeOAuthFlow(final HttpClient httpClient, final Supplier super(httpClient, stateSupplier); } + /** + * Validates the input OAuth configuration against the provided OAuth configuration specification. + * Additionally, checks if the OAuth parameter configuration is defined. 
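OAuthImplementationFactory.create now tries the declarative flow first and falls back to the image-name registry when createDeclarativeOAuthImplementation signals, via IllegalStateException, that the spec has no declarative OAuth section. A generic, self-contained sketch of that selection pattern (the orElseThrow here is illustrative only, not the factory's actual missing-image handling):

import java.util.Map;
import java.util.Optional;
import java.util.function.Supplier;

public class FallbackSelectionSketch {

  // Try the preferred (declarative) supplier first; if it does not apply, fall back
  // to a per-image registry.
  static <T> T select(final Supplier<T> preferred, final Map<String, T> registry, final String imageName) {
    try {
      return preferred.get();
    } catch (final IllegalStateException e) {
      return Optional.ofNullable(registry.get(imageName))
          .orElseThrow(() -> new IllegalArgumentException("No implementation registered for " + imageName));
    }
  }

}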
+ * + * @param oauthConfigSpecification the specification against which the input OAuth configuration is + * validated + * @param inputOAuthConfiguration the input OAuth configuration to be validated + * @param oauthParamConfig the OAuth parameter configuration to be checked for null + * @throws IOException if an I/O error occurs during validation + * @throws JsonValidationException if the input OAuth configuration does not conform to the + * specification + * @throws ResourceNotFoundProblem if the OAuth parameter configuration is null + */ + protected void validateInputOAuthConfiguration(final OAuthConfigSpecification oauthConfigSpecification, + final JsonNode inputOAuthConfiguration, + final JsonNode oauthParamConfig) + throws IOException, JsonValidationException { + validateInputOAuthConfiguration(oauthConfigSpecification, inputOAuthConfiguration); + if (oauthParamConfig == null) { + final ProblemResourceData problem = new ProblemResourceData().resourceType(ConfigSchema.SOURCE_OAUTH_PARAM.name()); + throw new ResourceNotFoundProblem("Undefined OAuth Parameter.", problem); + } + } + /** * Overrides the getState method to provide a customizable STATE parameter. * @@ -41,6 +70,66 @@ protected String getState(final JsonNode inputOAuthConfiguration) { return (state.isMissingNode()) ? getState() : specHandler.getConfigurableState(state); } + /** + * Generates the source consent URL for OAuth authentication. + * + * @param workspaceId the UUID of the workspace. + * @param sourceDefinitionId the UUID of the source definition. + * @param redirectUrl the URL to redirect to after consent is granted. + * @param inputOAuthConfiguration the input OAuth configuration as a JsonNode. + * @param oauthConfigSpecification the OAuth configuration specification. + * @param sourceOAuthParamConfig the source OAuth parameter configuration as a JsonNode. + * @return the formatted consent URL as a String. + * @throws IOException if an I/O error occurs. + * @throws JsonValidationException if the JSON validation fails. + */ + @Override + public String getSourceConsentUrl(final UUID workspaceId, + final UUID sourceDefinitionId, + final String redirectUrl, + final JsonNode inputOAuthConfiguration, + final OAuthConfigSpecification oauthConfigSpecification, + final JsonNode sourceOAuthParamConfig) + throws IOException, JsonValidationException { + validateInputOAuthConfiguration(oauthConfigSpecification, inputOAuthConfiguration, sourceOAuthParamConfig); + final JsonNode declarativeOAuthConfig = getOAuthDeclarativeInputSpec(oauthConfigSpecification); + return formatConsentUrl( + sourceDefinitionId, + getConfigValueUnsafe(sourceOAuthParamConfig, specHandler.getClientIdKey(declarativeOAuthConfig)), + redirectUrl, + Jsons.mergeNodes(inputOAuthConfiguration, declarativeOAuthConfig)); + + } + + /** + * Generates the destination consent URL for OAuth authentication. 
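Before rendering the consent URL, the declarative flow merges the user-supplied OAuth input with the connector's declarative input spec (Jsons.mergeNodes) and reads the client id out of the stored OAuth parameter config using a configurable key. A rough Jackson-only sketch of the merge step, with made-up field names; Jsons.mergeNodes in the patch performs the real merge:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class DeclarativeMergeSketch {

  public static void main(final String[] args) throws Exception {
    final ObjectMapper mapper = new ObjectMapper();
    final ObjectNode userInput = (ObjectNode) mapper.readTree("{\"subdomain\":\"acme\"}");
    final JsonNode declarativeSpec = mapper.readTree(
        "{\"consent_url\":\"https://example.com/oauth/authorize\",\"client_id_key\":\"client_id\"}");

    // The declarative spec is merged over the user input, mirroring
    // Jsons.mergeNodes(inputOAuthConfiguration, declarativeOAuthConfig).
    final ObjectNode merged = userInput.deepCopy();
    merged.setAll((ObjectNode) declarativeSpec);

    System.out.println(merged.toPrettyString());
  }

}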
+ * + * @param workspaceId the UUID of the workspace + * @param destinationDefinitionId the UUID of the destination definition + * @param redirectUrl the URL to redirect to after consent is granted + * @param inputOAuthConfiguration the input OAuth configuration as a JsonNode + * @param oauthConfigSpecification the OAuth configuration specification + * @param destinationOAuthParamConfig the destination OAuth parameter configuration as a JsonNode + * @return the formatted consent URL as a String + * @throws IOException if an I/O error occurs + * @throws JsonValidationException if the JSON validation fails + */ + @Override + public String getDestinationConsentUrl(final UUID workspaceId, + final UUID destinationDefinitionId, + final String redirectUrl, + final JsonNode inputOAuthConfiguration, + final OAuthConfigSpecification oauthConfigSpecification, + final JsonNode destinationOAuthParamConfig) + throws IOException, JsonValidationException { + validateInputOAuthConfiguration(oauthConfigSpecification, inputOAuthConfiguration, destinationOAuthParamConfig); + final JsonNode declarativeOAuthConfig = getOAuthDeclarativeInputSpec(oauthConfigSpecification); + return formatConsentUrl(destinationDefinitionId, + getConfigValueUnsafe(destinationOAuthParamConfig, specHandler.getClientIdKey(declarativeOAuthConfig)), + redirectUrl, + Jsons.mergeNodes(inputOAuthConfiguration, declarativeOAuthConfig)); + } + /** * IMPORTANT: DO NOT MODIFY! * @@ -75,22 +164,53 @@ protected String formatConsentUrl(final UUID definitionId, } } + /** + * Determines the content type for the token request based on the provided OAuth configuration. + * + * @param inputOAuthConfiguration the JSON node containing the OAuth configuration. + * @return the content type for the token request. If the configuration contains the access token + * parameters key, the content type is JSON. Otherwise, it delegates to the superclass + * implementation. + */ + @Override + protected TokenRequestContentType getRequestContentType(final JsonNode inputOAuthConfiguration) { + final JsonNode value = inputOAuthConfiguration.path(DeclarativeOAuthSpecHandler.ACCESS_TOKEN_PARAMS_KEY); + return (!value.isMissingNode()) ? TokenRequestContentType.JSON : super.getRequestContentType(inputOAuthConfiguration); + } + /** * IMPORTANT: DO NOT MODIFY! * - * This is the override for the base `getAccessTokenQueryParameters`. For the Declarative way of how - * the `access_token_url` is constructed, we use the `${placeHolders}` to provide the in-place - * interpolation, instead of having the complete `HashMap`. + * Generates the access token query parameters required for OAuth authentication. * - * @return An empty HashMap. + * @param clientId The client ID provided by the OAuth provider. + * @param clientSecret The client secret provided by the OAuth provider. + * @param authCode The authorization code received from the OAuth provider after user authorization. + * @param redirectUrl The redirect URL configured for the OAuth provider. + * @param state The state parameter to maintain state between the request and callback. + * @param inputOAuthConfiguration The JSON configuration containing additional OAuth parameters. + * @return A map containing the rendered access token query parameters or an empty HashMap. 
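In the declarative flow the access-token query parameters are no longer an empty map: the spec's access_token_params entries are rendered with ${placeHolder} interpolation against the client id, client secret, auth code, redirect URI, and state. A minimal stand-alone sketch of that rendering idea; the handler's real helpers (renderConfigAccessTokenParams, getAccessTokenParamsTemplateValues) carry more context than shown here:

import java.util.LinkedHashMap;
import java.util.Map;

public class AccessTokenParamsRenderSketch {

  // Substitute ${key} placeholders in each configured parameter value.
  static Map<String, String> render(final Map<String, String> configuredParams, final Map<String, String> templateValues) {
    final Map<String, String> rendered = new LinkedHashMap<>();
    configuredParams.forEach((key, value) -> {
      String result = value;
      for (final Map.Entry<String, String> entry : templateValues.entrySet()) {
        result = result.replace("${" + entry.getKey() + "}", entry.getValue());
      }
      rendered.put(key, result);
    });
    return rendered;
  }

  public static void main(final String[] args) {
    final Map<String, String> configured = Map.of("code", "${auth_code_value}", "redirect_uri", "${redirect_uri_value}");
    final Map<String, String> values = Map.of("auth_code_value", "abc123", "redirect_uri_value", "https://example.com/callback");
    System.out.println(render(configured, values));
  }

}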
*/ @Override protected Map getAccessTokenQueryParameters(final String clientId, final String clientSecret, final String authCode, final String redirectUrl, + final String state, final JsonNode inputOAuthConfiguration) { - return new HashMap<>(); + + final Map renderedAccessTokenQueryParams = + specHandler.renderConfigAccessTokenParams( + specHandler.getAccessTokenParamsTemplateValues( + inputOAuthConfiguration, + clientId, + clientSecret, + authCode, + redirectUrl, + state), + inputOAuthConfiguration); + + return renderedAccessTokenQueryParams; } /** @@ -183,6 +303,94 @@ protected String formatAccessTokenUrl(final String accessTokenUrl, } } + /** + * Completes the OAuth flow for a source. + * + * @param workspaceId the ID of the workspace + * @param sourceDefinitionId the ID of the source definition + * @param queryParams the query parameters from the OAuth callback + * @param redirectUrl the redirect URL used in the OAuth flow + * @param inputOAuthConfiguration the initial OAuth configuration provided by the user + * @param oauthConfigSpecification the specification for the OAuth configuration + * @param oauthParamConfig the OAuth parameter configuration + * @return a map containing the completed OAuth configuration + * @throws IOException if an I/O error occurs + * @throws JsonValidationException if the input OAuth configuration is invalid + */ + @Override + public Map completeSourceOAuth(final UUID workspaceId, + final UUID sourceDefinitionId, + final Map queryParams, + final String redirectUrl, + final JsonNode inputOAuthConfiguration, + final OAuthConfigSpecification oauthConfigSpecification, + final JsonNode oauthParamConfig) + throws IOException, JsonValidationException { + validateInputOAuthConfiguration(oauthConfigSpecification, inputOAuthConfiguration); + if (containsIgnoredOAuthError(queryParams)) { + return buildRequestError(queryParams); + } + + final JsonNode declarativeOAuthConfig = getOAuthDeclarativeInputSpec(oauthConfigSpecification); + final JsonNode oauthConfigurationMerged = Jsons.mergeNodes(inputOAuthConfiguration, declarativeOAuthConfig); + + return formatOAuthOutput( + oauthParamConfig, + completeOAuthFlow( + getConfigValueUnsafe(oauthParamConfig, specHandler.getClientIdKey(declarativeOAuthConfig)), + getConfigValueUnsafe(oauthParamConfig, specHandler.getClientSecretKey(declarativeOAuthConfig)), + extractCodeParameter(queryParams, oauthConfigurationMerged), + redirectUrl, + oauthConfigurationMerged, + oauthParamConfig, + extractStateParameter(queryParams, oauthConfigurationMerged)), + oauthConfigSpecification); + } + + /** + * Completes the OAuth flow for a destination by validating the input OAuth configuration, merging + * it with the declarative OAuth configuration, and formatting the output. 
+ * + * @param workspaceId the ID of the workspace + * @param destinationDefinitionId the ID of the destination definition + * @param queryParams the query parameters from the OAuth callback + * @param redirectUrl the redirect URL used in the OAuth flow + * @param inputOAuthConfiguration the input OAuth configuration provided by the user + * @param oauthConfigSpecification the OAuth configuration specification + * @param oauthParamConfig the OAuth parameter configuration + * @return a map containing the completed OAuth configuration + * @throws IOException if an I/O error occurs during the OAuth flow + * @throws JsonValidationException if the input OAuth configuration is invalid + */ + @Override + public Map completeDestinationOAuth(final UUID workspaceId, + final UUID destinationDefinitionId, + final Map queryParams, + final String redirectUrl, + final JsonNode inputOAuthConfiguration, + final OAuthConfigSpecification oauthConfigSpecification, + final JsonNode oauthParamConfig) + throws IOException, JsonValidationException { + validateInputOAuthConfiguration(oauthConfigSpecification, inputOAuthConfiguration); + if (containsIgnoredOAuthError(queryParams)) { + return buildRequestError(queryParams); + } + + final JsonNode declarativeOAuthConfig = getOAuthDeclarativeInputSpec(oauthConfigSpecification); + final JsonNode oauthConfigurationMerged = Jsons.mergeNodes(inputOAuthConfiguration, declarativeOAuthConfig); + return formatOAuthOutput( + oauthParamConfig, + completeOAuthFlow( + getConfigValueUnsafe(oauthParamConfig, specHandler.getClientIdKey(declarativeOAuthConfig)), + getConfigValueUnsafe(oauthParamConfig, specHandler.getClientSecretKey(declarativeOAuthConfig)), + extractCodeParameter(queryParams, oauthConfigurationMerged), + redirectUrl, + oauthConfigurationMerged, + oauthParamConfig, + extractStateParameter(queryParams, oauthConfigurationMerged)), + oauthConfigSpecification); + } + /** * IMPORTANT: DO NOT MODIFY! * @@ -203,13 +411,15 @@ protected Map extractOAuthOutput(final JsonNode data, } /** - * This function should parse and extract the state from these query parameters in order to continue - * the OAuth Flow. + * Extracts the state parameter from the query parameters based on the input OAuth configuration. + * + * @param queryParams the map of query parameters from the redirect URI + * @param inputOAuthConfiguration the JSON node containing the OAuth configuration + * @return the state parameter value if present in the query parameters + * @throws IOException if the state key is not found in the query parameters */ @Override - protected String extractStateParameter(final Map queryParams, - final JsonNode inputOAuthConfiguration) - throws IOException { + protected String extractStateParameter(final Map queryParams, final JsonNode inputOAuthConfiguration) throws IOException { // get the state key name with respect to userConfig input final String stateKey = specHandler.getStateKey(inputOAuthConfiguration); if (queryParams.containsKey(stateKey)) { @@ -220,4 +430,24 @@ protected String extractStateParameter(final Map queryParams, } } + /** + * Extracts the authorization code parameter from the provided query parameters. 
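extractStateParameter and the new extractCodeParameter share the same shape: the key name is read from the connector's OAuth input spec (state_key / auth_code_key), and a missing key fails loudly, listing the parameters that were actually received. A condensed sketch of that lookup, assuming the query parameters arrive as a Map<String, Object> as elsewhere in these flows:

import java.io.IOException;
import java.util.Map;

public class RedirectParamLookupSketch {

  // Look up a configurable key in the redirect query parameters, or fail with a
  // descriptive error listing the keys that were present.
  static String extract(final Map<String, Object> queryParams, final String key) throws IOException {
    if (queryParams.containsKey(key)) {
      return (String) queryParams.get(key);
    }
    throw new IOException(String.format("Undefined '%s' from redirect_uri. Keys available: %s", key, queryParams.keySet()));
  }

}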
+ * + * @param queryParams a map containing the query parameters from the redirect URI + * @param inputOAuthConfiguration a JsonNode containing the OAuth configuration + * @return the extracted authorization code as a String + * @throws IOException if the authorization code key is not found in the query parameters + */ + protected String extractCodeParameter(final Map queryParams, final JsonNode inputOAuthConfiguration) throws IOException { + // get the auth code key name with respect to userConfig input + final String authCodeKey = specHandler.getAuthCodeKey(inputOAuthConfiguration); + if (queryParams.containsKey(authCodeKey)) { + return (String) queryParams.get(authCodeKey); + } else { + final String errorMsg = + String.format("Undefined `auth_code_key`: '%s' from `redirect_uri`. Keys available: %s", authCodeKey, queryParams.keySet()); + throw new IOException(errorMsg); + } + } + } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/declarative/DeclarativeOAuthSpecHandler.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/declarative/DeclarativeOAuthSpecHandler.java index 7bcf39aaded..4808da164f7 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/declarative/DeclarativeOAuthSpecHandler.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/declarative/DeclarativeOAuthSpecHandler.java @@ -4,9 +4,9 @@ package io.airbyte.oauth.declarative; -import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.annotations.VisibleForTesting; +import io.airbyte.commons.json.JsonPaths; import io.airbyte.commons.json.Jsons; import java.io.IOException; import java.security.SecureRandom; @@ -49,19 +49,13 @@ public void setClock(final Clock clock) { this.clock = clock; } - /** - * TypeReference to cover expected objects in the userConfig. - */ - private static final TypeReference> STATE_TYPE_REF = new TypeReference<>() {}; - private static final TypeReference> COMPLETE_OAUTH_HEADERS_TYPE_REF = new TypeReference<>() {}; - private static final TypeReference> EXTRACT_OUTPUT_TYPE_REF = new TypeReference<>() {}; - /** * The Airbyte Protocol declared literals for an easy access and reuse. */ protected static final String ACCESS_TOKEN = "access_token"; protected static final String ACCESS_TOKEN_HEADERS_KEY = "access_token_headers"; protected static final String ACCESS_TOKEN_KEY = "access_token_key"; + protected static final String ACCESS_TOKEN_PARAMS_KEY = "access_token_params"; protected static final String ACCESS_TOKEN_URL = "access_token_url"; protected static final String AUTH_CODE_KEY = "auth_code_key"; protected static final String AUTH_CODE_VALUE = "code"; @@ -79,6 +73,8 @@ public void setClock(final Clock clock) { protected static final String STATE_KEY = "state_key"; protected static final String STATE_VALUE = "state"; protected static final String STATE_PARAM_KEY = STATE_VALUE; + protected static final String STATE_PARAM_MIN_KEY = "min"; + protected static final String STATE_PARAM_MAX_KEY = "max"; protected static final String TOKEN_EXPIRY_KEY = "expires_in"; protected static final String TOKEN_EXPIRY_DATE_KEY = "token_expiry_date"; @@ -152,6 +148,36 @@ protected final String getStateKey(final JsonNode userConfig) { return userConfig.path(STATE_KEY).asText(STATE_VALUE); } + /** + * Retrieves the authorization code key from the provided user configuration. 
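This accessor and the two client key accessors that follow, like getStateKey above, rely on Jackson's path().asText(default) idiom, so a connector spec only has to declare a key when it overrides the protocol default; auth_code_key, for example, falls back to "code". A quick self-contained illustration of that fallback:

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    class KeyOverrideSketch {
      public static void main(final String[] args) throws Exception {
        final ObjectMapper mapper = new ObjectMapper();
        final JsonNode withOverride = mapper.readTree("{\"auth_code_key\":\"my_auth_code_key\"}");
        final JsonNode withoutOverride = mapper.readTree("{}");

        // path() never returns null; asText(default) yields the default when the field is absent.
        System.out.println(withOverride.path("auth_code_key").asText("code"));    // my_auth_code_key
        System.out.println(withoutOverride.path("auth_code_key").asText("code")); // code
      }
    }
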
+ * + * @param userConfig the JSON node containing user configuration details + * @return the authorization code key as a string + */ + protected final String getAuthCodeKey(final JsonNode userConfig) { + return userConfig.path(AUTH_CODE_KEY).asText(AUTH_CODE_VALUE); + } + + /** + * Retrieves the client ID key from the provided user configuration. + * + * @param userConfig the JSON node containing the user configuration + * @return the client ID key as a string, or a default value if the key is not present + */ + protected final String getClientIdKey(final JsonNode userConfig) { + return userConfig.path(CLIENT_ID_KEY).asText(CLIENT_ID_VALUE); + } + + /** + * Retrieves the client secret key from the provided user configuration. + * + * @param userConfig the JSON node containing the user configuration + * @return the client secret key as a string, or a default value if the key is not present + */ + protected final String getClientSecretKey(final JsonNode userConfig) { + return userConfig.path(CLIENT_SECRET_KEY).asText(CLIENT_SECRET_VALUE); + } + /** * Generates a configurable state string based on the provided JSON configuration. * @@ -163,9 +189,9 @@ protected final String getStateKey(final JsonNode userConfig) { */ protected final String getConfigurableState(final JsonNode stateConfig) { - final Map state = Jsons.object(stateConfig, STATE_TYPE_REF); - final int min = state.getOrDefault("min", STATE_LEN_MIN); - final int max = state.getOrDefault("max", STATE_LEN_MAX); + final Map userState = Jsons.deserializeToIntegerMap(stateConfig); + final int min = userState.getOrDefault(STATE_PARAM_MIN_KEY, STATE_LEN_MIN); + final int max = userState.getOrDefault(STATE_PARAM_MAX_KEY, STATE_LEN_MAX); final int length = secureRandom.nextInt((max - min) + 1) + min; final StringBuilder stateValue = new StringBuilder(length); @@ -275,6 +301,34 @@ protected Map getAccessTokenUrlTemplateValues(final JsonNode use return templateValues; } + /** + * Generates a map of template values required for obtaining an access token. + * + * @param userConfig the user configuration as a JsonNode + * @param clientId the client ID for OAuth + * @param clientSecret the client secret for OAuth + * @param authCode the authorization code received from the authorization server + * @param redirectUrl the redirect URI used in the OAuth flow + * @param state the state parameter to maintain state between the request and callback + * @return a map containing the template values for the access token request + */ + protected Map getAccessTokenParamsTemplateValues(final JsonNode userConfig, + final String clientId, + final String clientSecret, + final String authCode, + final String redirectUrl, + final String state) { + + final Map templateValues = createDefaultTemplateMap(userConfig); + templateValues.put(templateValues.get(CLIENT_ID_KEY), clientId); + templateValues.put(templateValues.get(CLIENT_SECRET_KEY), clientSecret); + templateValues.put(templateValues.get(AUTH_CODE_KEY), authCode); + templateValues.put(templateValues.get(REDIRECT_URI_KEY), redirectUrl); + templateValues.put(templateValues.get(STATE_KEY), state); + + return templateValues; + } + /** * Renders a string template by replacing placeholders with corresponding values from the provided * map. @@ -326,14 +380,42 @@ protected void checkContext(final String templateString) throws IOException { * @return a list of strings representing the configuration extract output. 
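On getConfigurableState above: the state length is drawn uniformly from [min, max] via secureRandom.nextInt((max - min) + 1) + min, with the bounds read from the state configuration object and falling back to STATE_LEN_MIN/STATE_LEN_MAX. A self-contained sketch of that calculation; the alphabet used here is only illustrative, since the handler's actual character set is defined outside this hunk, and the 43/128 bounds are borrowed from the commented-out test entry below:

    import java.security.SecureRandom;

    class StateLengthSketch {
      private static final SecureRandom RANDOM = new SecureRandom();
      // Illustrative alphabet; the handler's real character set is not shown in this diff.
      private static final String ALPHABET = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";

      static String randomState(final int min, final int max) {
        // Same length calculation as getConfigurableState: uniform in [min, max].
        final int length = RANDOM.nextInt((max - min) + 1) + min;
        final StringBuilder state = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
          state.append(ALPHABET.charAt(RANDOM.nextInt(ALPHABET.length())));
        }
        return state.toString();
      }

      public static void main(final String[] args) {
        System.out.println(randomState(43, 128));
      }
    }
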
*/ protected final List getConfigExtractOutput(final JsonNode userConfig) { + final List extractOutputConfig = Jsons.deserializeToStringList( + Jsons.getNodeOrEmptyObject(userConfig, EXTRACT_OUTPUT_KEY)); + + // match the default BaseOAuth2Flow behaviour, returning ["refresh_token"] by default. + return !extractOutputConfig.isEmpty() ? extractOutputConfig : List.of(REFRESH_TOKEN); + } - final JsonNode value = userConfig.path(EXTRACT_OUTPUT_KEY); - final List extractOutputConfig = value.isMissingNode() - ? List.of() - : Jsons.object(value, EXTRACT_OUTPUT_TYPE_REF); + /** + * Renders the access token parameters by replacing placeholders in the parameter keys and values + * with the corresponding values from the provided template values. + * + * @param templateValues a map containing the template values to be used for rendering the access + * token parameters + * @param userConfig a JsonNode containing the user configuration, which includes the access token + * parameters + * @return a map with the rendered access token parameters + * @throws RuntimeException if an IOException occurs during the rendering of the string templates + */ + protected final Map renderConfigAccessTokenParams(final Map templateValues, + final JsonNode userConfig) { + + final Map accessTokenParamsRendered = new HashMap<>(); + final Map userAccessTokenParams = Jsons.deserializeToStringMap( + Jsons.getNodeOrEmptyObject(userConfig, ACCESS_TOKEN_PARAMS_KEY)); + + userAccessTokenParams.forEach((paramKey, paramValue) -> { + try { + accessTokenParamsRendered.put( + renderStringTemplate(templateValues, paramKey), + renderStringTemplate(templateValues, paramValue)); + } catch (final IOException e) { + throw new RuntimeException(e); + } + }); - // match the default BaseOAuth2Flow behaviour - return (!extractOutputConfig.isEmpty()) ? extractOutputConfig : List.of(REFRESH_TOKEN); + return accessTokenParamsRendered; } /** @@ -347,12 +429,10 @@ protected final Map renderCompleteOAuthHeaders(final Map userHeaders = value.isMissingNode() - ? 
new HashMap<>() - : Jsons.object(value, COMPLETE_OAUTH_HEADERS_TYPE_REF); - final Map accessTokenHeadersRendered = new HashMap<>(); + final Map userHeaders = Jsons.deserializeToStringMap( + Jsons.getNodeOrEmptyObject(userConfig, ACCESS_TOKEN_HEADERS_KEY)); + userHeaders.forEach((headerKey, headerValue) -> { try { accessTokenHeadersRendered.put( @@ -367,14 +447,14 @@ protected final Map renderCompleteOAuthHeaders(final Map processOAuthOutput(final JsonNode userConfig, final JsonNode data, @@ -383,19 +463,33 @@ protected Map processOAuthOutput(final JsonNode userConfig, final Map oauth_output = new HashMap<>(); - for (final String item : getConfigExtractOutput(userConfig)) { - if (data.has(item)) { - if (TOKEN_EXPIRY_KEY.equals(item)) { - oauth_output.put( - TOKEN_EXPIRY_DATE_KEY, - Instant.now(clock).plusSeconds(data.get(TOKEN_EXPIRY_KEY).asInt()).toString()); + for (final String path : getConfigExtractOutput(userConfig)) { + final String value = JsonPaths.getSingleValueTextOrNull(data, path); + final String key = JsonPaths.getTargetKeyFromJsonPath(path); + + if (value != null) { + // handle `expires_in` presence + if (TOKEN_EXPIRY_KEY.equals(key)) { + oauth_output.put(TOKEN_EXPIRY_DATE_KEY, processExpiresIn(value)); } - oauth_output.put(item, data.get(item).asText()); + + oauth_output.put(key, value); } else { - throw new IOException(String.format("Missing '%s' in query params from %s", item, accessTokenUrl)); + throw new IOException(String.format("Missing '%s' in query params from %s", key, accessTokenUrl)); } } + return oauth_output; } + /** + * Processes the expiration time by adding the specified number of seconds to the current time. + * + * @param value the number of seconds to add to the current time, represented as a string + * @return a string representation of the new expiration time + */ + private String processExpiresIn(final String value) { + return Instant.now(clock).plusSeconds(Integer.parseInt(value)).toString(); + } + } diff --git a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java index 69c8a796ba9..c65bfeb0b5e 100644 --- a/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java +++ b/airbyte-oauth/src/main/java/io/airbyte/oauth/flows/HubspotOAuthFlow.java @@ -89,7 +89,8 @@ private String getOptionalScopes() { "crm.objects.owners.read", "crm.schemas.companies.read", "crm.schemas.custom.read", - "crm.schemas.deals.read"); + "crm.schemas.deals.read", + "crm.objects.leads.read"); } private String getRequiredScopes() { diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/declarative/DeclarativeOAuthSpecHandlerTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/declarative/DeclarativeOAuthSpecHandlerTest.java index 913e8a68866..281e0e2e9ef 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/declarative/DeclarativeOAuthSpecHandlerTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/declarative/DeclarativeOAuthSpecHandlerTest.java @@ -25,6 +25,9 @@ class DeclarativeOAuthSpecHandlerTest { private final SecureRandom secureRandom = mock(SecureRandom.class); private final DeclarativeOAuthSpecHandler handler = new DeclarativeOAuthSpecHandler(); + private static final String ACCESS_TOKEN_TEST_VALUE = "access_token_value"; + private static final String REFRESH_TOKEN_TEST_VALUE = "refresh_token_value"; + private static final String TEST_ACCESS_TOKEN_URL = "test_access_token_url"; private static final String TEST_CLIENT_ID = "test_client_id"; private 
static final String TEST_REDIRECT_URI = "test_redirect_uri"; private static final String TEST_STATE = "test_state"; @@ -110,15 +113,65 @@ void testRenderCompleteOAuthHeaders() throws IOException { assertEquals("header_value", headers.get("header_key")); } + /** + * Tests the processOAuthOutput method of the DeclarativeOAuthSpecHandler class. + * + * Examples: + * + * Input: ["access_token", "refresh_token"] Output: {"access_token": "access_token_value", + * "refresh_token": "refresh_token_value"} + * + * This test verifies that the processOAuthOutput method correctly extracts the access token from + * the provided user configuration and data. + * + */ @Test void testProcessOAuthOutput() throws IOException { - final JsonNode userConfig = Jsons.jsonNode( + final List extractOutputInputValues = List.of(DeclarativeOAuthSpecHandler.ACCESS_TOKEN, DeclarativeOAuthSpecHandler.REFRESH_TOKEN); + + final JsonNode userConfig = Jsons.jsonNode(Map.of(DeclarativeOAuthSpecHandler.EXTRACT_OUTPUT_KEY, extractOutputInputValues)); + final JsonNode jsonData = Jsons.jsonNode( Map.of( - DeclarativeOAuthSpecHandler.EXTRACT_OUTPUT_KEY, List.of(DeclarativeOAuthSpecHandler.ACCESS_TOKEN))); + DeclarativeOAuthSpecHandler.ACCESS_TOKEN, ACCESS_TOKEN_TEST_VALUE, + DeclarativeOAuthSpecHandler.REFRESH_TOKEN, REFRESH_TOKEN_TEST_VALUE)); + + final Map output = handler.processOAuthOutput(userConfig, jsonData, TEST_ACCESS_TOKEN_URL); + + assertEquals(ACCESS_TOKEN_TEST_VALUE, output.get(DeclarativeOAuthSpecHandler.ACCESS_TOKEN)); + assertEquals(REFRESH_TOKEN_TEST_VALUE, output.get(DeclarativeOAuthSpecHandler.REFRESH_TOKEN)); + } + + /** + * Tests the processOAuthOutput method to ensure it correctly extracts OAuth tokens from a nested + * JSON data structure. + * + * Examples: + * + * Input: ["main_data.nested_data.auth_data.access_token", "main_data.nested_data.refresh_token"] + * Output: {"access_token": "access_token_value", "refresh_token": "refresh_token_value"} + * + * The test constructs a JSON input with nested data, specifies the paths to the access token and + * refresh token, and verifies that the processOAuthOutput method correctly extracts these tokens + * into a map. + */ + @Test + void testProcessOAuthOutputFromNestedDataObject() throws IOException { + final String accessTokenEntry = "data.nested.auth." + DeclarativeOAuthSpecHandler.ACCESS_TOKEN; + final String refreshTokenEntry = "data.nested." 
+ DeclarativeOAuthSpecHandler.REFRESH_TOKEN; + final List extractOutputInputValues = List.of(accessTokenEntry, refreshTokenEntry); + + final JsonNode userConfig = Jsons.jsonNode(Map.of(DeclarativeOAuthSpecHandler.EXTRACT_OUTPUT_KEY, extractOutputInputValues)); + final JsonNode jsonData = Jsons.jsonNode( + Map.of( + "data", Map.of( + "nested", Map.of( + DeclarativeOAuthSpecHandler.REFRESH_TOKEN, REFRESH_TOKEN_TEST_VALUE, + "auth", Map.of(DeclarativeOAuthSpecHandler.ACCESS_TOKEN, ACCESS_TOKEN_TEST_VALUE))))); + + final Map output = handler.processOAuthOutput(userConfig, jsonData, TEST_ACCESS_TOKEN_URL); - final JsonNode data = Jsons.jsonNode(Map.of(DeclarativeOAuthSpecHandler.ACCESS_TOKEN, "token_value")); - final Map output = handler.processOAuthOutput(userConfig, data, DeclarativeOAuthSpecHandler.ACCESS_TOKEN_KEY); - assertEquals("token_value", output.get(DeclarativeOAuthSpecHandler.ACCESS_TOKEN)); + assertEquals(ACCESS_TOKEN_TEST_VALUE, output.get(DeclarativeOAuthSpecHandler.ACCESS_TOKEN)); + assertEquals(REFRESH_TOKEN_TEST_VALUE, output.get(DeclarativeOAuthSpecHandler.REFRESH_TOKEN)); } } diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/DeclarativeOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/DeclarativeOAuthFlowTest.java index 6c683f12f21..979c796104c 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/DeclarativeOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/DeclarativeOAuthFlowTest.java @@ -50,7 +50,7 @@ protected String getExpectedConsentUrl() { final String expectedCodeChallenge = "S6aXNcpTdl7WpwnttWxuoja3GTo7KaazkMNG8PQ0Dk4="; return String.format( - "https://some.domain.com/oauth2/authorize?client_id=%s&redirect_uri=%s&scope=%s&state=%s&subdomain=%s&code_challenge=%s", + "https://some.domain.com/oauth2/authorize?my_client_id_key=%s&callback_uri=%s&scope=%s&my_state_key=%s&subdomain=%s&code_challenge=%s", expectedClientId, expectedRedirectUri, expectedScope, @@ -61,19 +61,23 @@ protected String getExpectedConsentUrl() { @Override protected JsonNode getInputOAuthConfiguration() { - return Jsons.jsonNode(Map.of( + return Jsons.jsonNode(Map.ofEntries( // the `subdomain` is a custom property passed by the user (test) - "subdomain", "test_subdomain", + Map.entry("subdomain", "test_subdomain"), // these are the part of the spec, // not all spec properties are provided, since they provide an override to the default values. 
- "consent_url", - "https://some.domain.com/oauth2/authorize?{client_id_key}={{client_id_key}}&{redirect_uri_key}={urlEncoder:{{redirect_uri_key}}}&{scope_key}={urlEncoder:{{scope_key}}}&{state_key}={{state_key}}&subdomain={subdomain}&code_challenge={codeChallengeS256:{{state_key}}}", - "scope", "test_scope_1 test_scope_2 test_scope_3", - "access_token_url", "https://some.domain.com/oauth2/token/", - "access_token_headers", Jsons.jsonNode(Map.of("test_header", "test_value")), - // "state", Jsons.jsonNode(Map.of("min", 43, "max", 128)), - // "state_key", "my_custom_state_key", - "extract_output", Jsons.jsonNode(List.of(ACCESS_TOKEN, REFRESH_TOKEN, EXPIRES_IN)))); + Map.entry("consent_url", + "https://some.domain.com/oauth2/authorize?{client_id_key}={{client_id_key}}&{redirect_uri_key}={urlEncoder:{{redirect_uri_key}}}&{scope_key}={urlEncoder:{{scope_key}}}&{state_key}={{state_key}}&subdomain={subdomain}&code_challenge={codeChallengeS256:{{state_key}}}"), + Map.entry("scope", "test_scope_1 test_scope_2 test_scope_3"), + Map.entry("access_token_url", "https://some.domain.com/oauth2/token/"), + Map.entry("access_token_headers", Jsons.jsonNode(Map.of("test_header", "test_value"))), + // Map.entry("state", Jsons.jsonNode(Map.of("min", 43, "max", 128))), + Map.entry("state_key", "my_state_key"), + Map.entry("client_id_key", "my_client_id_key"), + Map.entry("client_secret_key", "my_client_secret_key"), + Map.entry("auth_code_key", "my_auth_code_key"), + Map.entry("redirect_uri_key", "callback_uri"), + Map.entry("extract_output", Jsons.jsonNode(List.of(ACCESS_TOKEN, REFRESH_TOKEN, EXPIRES_IN))))); } @Override @@ -127,6 +131,10 @@ protected Map getExpectedFilteredOutput() { @Override protected Map getQueryParams() { return Map.of( + // keys override + "my_auth_code_key", "test_code", + "my_state_key", getConstantState(), + // default test values "code", "test_code", "state", getConstantState()); } diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java index d5a51f55d12..dff31afe90c 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/HubspotOAuthFlowTest.java @@ -15,7 +15,7 @@ protected BaseOAuthFlow getOAuthFlow() { @Override protected String getExpectedConsentUrl() { - return "https://app.hubspot.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scopes=crm.schemas.contacts.read+crm.objects.contacts.read&optional_scopes=content+automation+e-commerce+files+files.ui_hidden.read+forms+forms-uploaded-files+sales-email-read+tickets+crm.lists.read+crm.objects.companies.read+crm.objects.custom.read+crm.objects.deals.read+crm.objects.feedback_submissions.read+crm.objects.goals.read+crm.objects.owners.read+crm.schemas.companies.read+crm.schemas.custom.read+crm.schemas.deals.read"; + return "https://app.hubspot.com/oauth/authorize?client_id=test_client_id&redirect_uri=https%3A%2F%2Fairbyte.io&state=state&scopes=crm.schemas.contacts.read+crm.objects.contacts.read&optional_scopes=content+automation+e-commerce+files+files.ui_hidden.read+forms+forms-uploaded-files+sales-email-read+tickets+crm.lists.read+crm.objects.companies.read+crm.objects.custom.read+crm.objects.deals.read+crm.objects.feedback_submissions.read+crm.objects.goals.read+crm.objects.owners.read+crm.schemas.companies.read+crm.schemas.custom.read+crm.schemas.deals.read+crm.objects.leads.read"; } 
@Override diff --git a/airbyte-persistence/job-persistence/build.gradle.kts b/airbyte-persistence/job-persistence/build.gradle.kts index 0ed92bc7399..6a67321877c 100644 --- a/airbyte-persistence/job-persistence/build.gradle.kts +++ b/airbyte-persistence/job-persistence/build.gradle.kts @@ -4,8 +4,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(libs.bundles.micronaut.annotation.processor) ksp(platform(libs.micronaut.platform)) diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java index 2701e0c23a2..1c8119c79b0 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/DefaultJobCreator.java @@ -52,6 +52,7 @@ import io.airbyte.featureflag.Workspace; import jakarta.annotation.Nullable; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.List; import java.util.Optional; @@ -59,15 +60,17 @@ import java.util.UUID; import java.util.function.Function; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Default implementation of enqueueing a job. Hides the details of building the Job object and * storing it in the jobs db. */ -@Slf4j public class DefaultJobCreator implements JobCreator { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + // Resets use an empty source which doesn't have a source definition. 
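On the @Slf4j removals in this and the following files: the replacement is the plain SLF4J idiom, where MethodHandles.lookup().lookupClass() resolves to the enclosing class, so the declaration can be copied between classes without edits. A minimal equivalent for reference:

    import java.lang.invoke.MethodHandles;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class LoggerIdiomSketch {
      // What Lombok's @Slf4j generated implicitly...
      private static final Logger logFromClassLiteral = LoggerFactory.getLogger(LoggerIdiomSketch.class);
      // ...and the explicit replacement used in this change; both loggers share the same name.
      private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

      public static void main(final String[] args) {
        log.info("logger name: {}", logFromClassLiteral.getName());
      }
    }
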
private static final StandardSourceDefinition RESET_SOURCE_DEFINITION = null; private final JobPersistence jobPersistence; diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java index 5bee5e17bf8..d00bc3e033b 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/JobNotifier.java @@ -264,39 +264,40 @@ private NotificationItem createAndSend(final NotificationSettings notificationSe .map(FailureReason::getExternalMessage) .orElse(null); - SyncSummary.SyncSummaryBuilder summaryBuilder = SyncSummary.builder() - .workspace(WorkspaceInfo.builder() - .name(workspace.getName()).id(workspaceId).url(webUrlHelper.getWorkspaceUrl(workspaceId)).build()) - .connection(ConnectionInfo.builder().name(standardSync.getName()).id(standardSync.getConnectionId()) - .url(webUrlHelper.getConnectionUrl(workspaceId, standardSync.getConnectionId())).build()) - .source( - SourceInfo.builder() - .name(source.getName()).id(source.getSourceId()).url(webUrlHelper.getSourceUrl(workspaceId, source.getSourceId())).build()) - .destination(DestinationInfo.builder() - .name(destination.getName()).id(destination.getDestinationId()) - .url(webUrlHelper.getDestinationUrl(workspaceId, destination.getDestinationId())).build()) - .startedAt(Instant.ofEpochSecond(job.getCreatedAtInSecond())) - .finishedAt(Instant.ofEpochSecond(job.getUpdatedAtInSecond())) - .isSuccess(job.getStatus() == JobStatus.SUCCEEDED) - .jobId(job.getId()) - .errorMessage(failureMessage); + long bytesEmitted = 0; + long bytesCommitted = 0; + long recordsEmitted = 0; + long recordsFilteredOut = 0; + long bytesFilteredOut = 0; + long recordsCommitted = 0; if (syncStats != null) { - long bytesEmitted = syncStats.getBytesEmitted() != null ? syncStats.getBytesEmitted() : 0; - long bytesCommitted = syncStats.getBytesCommitted() != null ? syncStats.getBytesCommitted() : 0; - long recordsEmitted = syncStats.getRecordsEmitted() != null ? syncStats.getRecordsEmitted() : 0; - long recordsFilteredOut = syncStats.getRecordsFilteredOut() != null ? syncStats.getRecordsFilteredOut() : 0; - long bytesFilteredOut = syncStats.getBytesFilteredOut() != null ? syncStats.getBytesFilteredOut() : 0; - long recordsCommitted = syncStats.getRecordsCommitted() != null ? syncStats.getRecordsCommitted() : 0; - summaryBuilder.bytesEmitted(bytesEmitted) - .bytesCommitted(bytesCommitted) - .recordsEmitted(recordsEmitted) - .recordsFilteredOut(recordsFilteredOut) - .bytesFilteredOut(bytesFilteredOut) - .recordsCommitted(recordsCommitted); + bytesEmitted = syncStats.getBytesEmitted() != null ? syncStats.getBytesEmitted() : 0; + bytesCommitted = syncStats.getBytesCommitted() != null ? syncStats.getBytesCommitted() : 0; + recordsEmitted = syncStats.getRecordsEmitted() != null ? syncStats.getRecordsEmitted() : 0; + recordsFilteredOut = syncStats.getRecordsFilteredOut() != null ? syncStats.getRecordsFilteredOut() : 0; + bytesFilteredOut = syncStats.getBytesFilteredOut() != null ? syncStats.getBytesFilteredOut() : 0; + recordsCommitted = syncStats.getRecordsCommitted() != null ? 
syncStats.getRecordsCommitted() : 0; } - SyncSummary summary = summaryBuilder.build(); + SyncSummary summary = new SyncSummary( + new WorkspaceInfo(workspaceId, workspace.getName(), webUrlHelper.getWorkspaceUrl(workspaceId)), + new ConnectionInfo(standardSync.getConnectionId(), standardSync.getName(), + webUrlHelper.getConnectionUrl(workspaceId, standardSync.getConnectionId())), + new SourceInfo(source.getSourceId(), source.getName(), webUrlHelper.getSourceUrl(workspaceId, source.getSourceId())), + new DestinationInfo(destination.getDestinationId(), destination.getName(), + webUrlHelper.getDestinationUrl(workspaceId, destination.getDestinationId())), + job.getId(), + job.getStatus() == JobStatus.SUCCEEDED, + Instant.ofEpochSecond(job.getCreatedAtInSecond()), + Instant.ofEpochSecond(job.getUpdatedAtInSecond()), + bytesEmitted, + bytesCommitted, + recordsEmitted, + recordsCommitted, + recordsFilteredOut, + bytesFilteredOut, + failureMessage); if (notificationSettings != null) { if (FAILURE_NOTIFICATION.equalsIgnoreCase(action)) { diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java index 8f232496081..d3472b7218c 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java @@ -203,7 +203,7 @@ private static void traverseOAuthOutputPaths(final ConnectorSpecification spec, final BiConsumer> consumer) { final JsonNode outputSpecTop = spec.getAdvancedAuth().getOauthConfigSpecification().getCompleteOauthServerOutputSpecification(); final JsonNode outputSpec; - if (outputSpecTop.has(PROPERTIES)) { + if (outputSpecTop != null && outputSpecTop.has(PROPERTIES)) { outputSpec = outputSpecTop.get(PROPERTIES); } else { LOGGER.error(String.format("In %s's advanced_auth spec, completeOAuthServerOutputSpecification does not declare properties.", connectorName)); diff --git a/airbyte-server/build.gradle.kts b/airbyte-server/build.gradle.kts index 3a3f704b58c..149b2859b0f 100644 --- a/airbyte-server/build.gradle.kts +++ b/airbyte-server/build.gradle.kts @@ -6,8 +6,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) annotationProcessor(libs.micronaut.jaxrs.processor) @@ -47,6 +45,7 @@ dependencies { implementation(project(":oss:airbyte-api:problems-api")) implementation(project(":oss:airbyte-api:public-api")) implementation(project(":oss:airbyte-api:server-api")) + implementation(project(":oss:airbyte-audit-logging")) implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-converters")) @@ -65,6 +64,7 @@ dependencies { implementation(project(":oss:airbyte-config:specs")) implementation(project(":oss:airbyte-data")) implementation(project(":oss:airbyte-featureflag")) + implementation(project(":oss:airbyte-mappers")) implementation(project(":oss:airbyte-metrics:metrics-lib")) implementation(project(":oss:airbyte-db:db-lib")) implementation(project(":oss:airbyte-db:jooq")) @@ -83,13 +83,16 @@ dependencies { runtimeOnly(libs.hikaricp) runtimeOnly(libs.h2.database) - 
testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) testAnnotationProcessor(libs.micronaut.jaxrs.processor) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) + kspTest(platform(libs.micronaut.platform)) + kspTest(libs.bundles.micronaut.annotation.processor) + kspTest(libs.micronaut.jaxrs.processor) + kspTest(libs.bundles.micronaut.test.annotation.processor) + testImplementation(libs.bundles.micronaut.test) testImplementation(project(":oss:airbyte-test-utils")) testImplementation(libs.postgresql) @@ -108,11 +111,12 @@ dependencies { } // we want to be able to access the generated db files from config/init when we build the server docker image.) -val copySeed = tasks.register("copySeed") { - from("${project(":oss:airbyte-config:init").layout.buildDirectory.get()}/resources/main/config") - into("${project.layout.buildDirectory.get()}/config_init/resources/main/config") - dependsOn(project(":oss:airbyte-config:init").tasks.named("processResources")) -} +val copySeed = + tasks.register("copySeed") { + from("${project(":oss:airbyte-config:init").layout.buildDirectory.get()}/resources/main/config") + into("${project.layout.buildDirectory.get()}/config_init/resources/main/config") + dependsOn(project(":oss:airbyte-config:init").tasks.named("processResources")) + } // need to make sure that the files are in the resource directory before copying.) // tests require the seed to exist.) diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java index 20143acb730..60e91f62414 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectionApiController.java @@ -56,6 +56,7 @@ import io.airbyte.commons.server.handlers.SchedulerHandler; import io.airbyte.commons.server.handlers.StreamRefreshesHandler; import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors; +import io.airbyte.commons.server.services.ConnectionService; import io.airbyte.server.handlers.StreamStatusesHandler; import io.micronaut.context.annotation.Context; import io.micronaut.http.HttpStatus; @@ -68,14 +69,13 @@ import io.micronaut.security.rules.SecurityRule; import jakarta.validation.Valid; import jakarta.validation.constraints.NotNull; +import java.time.Instant; import java.util.ArrayList; import java.util.List; -import lombok.extern.slf4j.Slf4j; @Controller("/api/v1/connections") @Context @Secured(SecurityRule.IS_AUTHENTICATED) -@Slf4j public class ConnectionApiController implements ConnectionApi { private final ConnectionsHandler connectionsHandler; @@ -85,6 +85,7 @@ public class ConnectionApiController implements ConnectionApi { private final MatchSearchHandler matchSearchHandler; private final StreamRefreshesHandler streamRefreshesHandler; private final JobHistoryHandler jobHistoryHandler; + private final ConnectionService connectionService; public ConnectionApiController(final ConnectionsHandler connectionsHandler, final OperationsHandler operationsHandler, @@ -92,7 +93,8 @@ public ConnectionApiController(final ConnectionsHandler connectionsHandler, final StreamStatusesHandler streamStatusesHandler, final MatchSearchHandler matchSearchHandler, final StreamRefreshesHandler streamRefreshesHandler, - final 
JobHistoryHandler jobHistoryHandler) { + final JobHistoryHandler jobHistoryHandler, + final ConnectionService connectionService) { this.connectionsHandler = connectionsHandler; this.operationsHandler = operationsHandler; this.schedulerHandler = schedulerHandler; @@ -100,6 +102,7 @@ public ConnectionApiController(final ConnectionsHandler connectionsHandler, this.matchSearchHandler = matchSearchHandler; this.streamRefreshesHandler = streamRefreshesHandler; this.jobHistoryHandler = jobHistoryHandler; + this.connectionService = connectionService; } @Override @@ -107,7 +110,10 @@ public ConnectionApiController(final ConnectionsHandler connectionsHandler, @Secured({ADMIN}) @ExecuteOn(AirbyteTaskExecutors.IO) public InternalOperationResult autoDisableConnection(@Body final ConnectionIdRequestBody connectionIdRequestBody) { - return ApiHelper.execute(() -> connectionsHandler.autoDisableConnection(connectionIdRequestBody.getConnectionId())); + return ApiHelper.execute(() -> { + final boolean wasDisabled = connectionService.warnOrDisableForConsecutiveFailures(connectionIdRequestBody.getConnectionId(), Instant.now()); + return new InternalOperationResult().succeeded(wasDisabled); + }); } @Override diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorBuilderProjectApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorBuilderProjectApiController.java index b02a7d7ecc0..a340cacb641 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorBuilderProjectApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorBuilderProjectApiController.java @@ -14,6 +14,9 @@ import io.airbyte.api.generated.ConnectorBuilderProjectApi; import io.airbyte.api.model.generated.BuilderProjectForDefinitionRequestBody; import io.airbyte.api.model.generated.BuilderProjectForDefinitionResponse; +import io.airbyte.api.model.generated.BuilderProjectOauthConsentRequest; +import io.airbyte.api.model.generated.CompleteConnectorBuilderProjectOauthRequest; +import io.airbyte.api.model.generated.CompleteOAuthResponse; import io.airbyte.api.model.generated.ConnectorBuilderProjectForkRequestBody; import io.airbyte.api.model.generated.ConnectorBuilderProjectIdWithWorkspaceId; import io.airbyte.api.model.generated.ConnectorBuilderProjectRead; @@ -26,6 +29,7 @@ import io.airbyte.api.model.generated.DeclarativeManifestBaseImageRead; import io.airbyte.api.model.generated.DeclarativeManifestRequestBody; import io.airbyte.api.model.generated.ExistingConnectorBuilderProjectWithWorkspaceId; +import io.airbyte.api.model.generated.OAuthConsentRead; import io.airbyte.api.model.generated.SourceDefinitionIdBody; import io.airbyte.api.model.generated.WorkspaceIdRequestBody; import io.airbyte.commons.server.handlers.ConnectorBuilderProjectsHandler; @@ -162,4 +166,26 @@ public BuilderProjectForDefinitionResponse getConnectorBuilderProjectIdForDefini .execute(() -> connectorBuilderProjectsHandler.getConnectorBuilderProjectForDefinitionId(builderProjectForDefinitionRequestBody)); } + @Override + @Post(uri = "/get_oauth_consent_url") + @Status(HttpStatus.OK) + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) + @ExecuteOn(AirbyteTaskExecutors.IO) + @SuppressWarnings("LineLength") + public OAuthConsentRead getConnectorBuilderProjectOAuthConsent(@Body final BuilderProjectOauthConsentRequest builderProjectOauthConsentRequestBody) { + return ApiHelper + .execute(() -> 
connectorBuilderProjectsHandler.getConnectorBuilderProjectOAuthConsent(builderProjectOauthConsentRequestBody)); + } + + @Override + @Post(uri = "/complete_oauth") + @Status(HttpStatus.OK) + @Secured({WORKSPACE_READER, ORGANIZATION_READER}) + @ExecuteOn(AirbyteTaskExecutors.IO) + @SuppressWarnings("LineLength") + public CompleteOAuthResponse completeConnectorBuilderProjectOauth(@Body final CompleteConnectorBuilderProjectOauthRequest completeConnectorBuilderProjectOauthRequest) { + return ApiHelper + .execute(() -> connectorBuilderProjectsHandler.completeConnectorBuilderProjectOAuth(completeConnectorBuilderProjectOauthRequest)); + } + } diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java index eeb76ffd563..deeccdf79bd 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/PermissionApiController.java @@ -23,6 +23,7 @@ import io.airbyte.api.model.generated.PermissionUpdate; import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest; import io.airbyte.api.model.generated.UserIdRequestBody; +import io.airbyte.commons.annotation.AuditLogging; import io.airbyte.commons.server.handlers.PermissionHandler; import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors; import io.airbyte.validation.json.JsonValidationException; @@ -54,6 +55,7 @@ public PermissionApiController(final PermissionHandler permissionHandler) { @Secured({ORGANIZATION_ADMIN, WORKSPACE_ADMIN}) @Post("/create") @Override + @AuditLogging(provider = "createPermission") public PermissionRead createPermission(@Body final PermissionCreate permissionCreate) { return ApiHelper.execute(() -> { validatePermissionCreation(permissionCreate); @@ -80,6 +82,7 @@ public PermissionRead getPermission(@Body final PermissionIdRequestBody permissi @Secured({ORGANIZATION_ADMIN, WORKSPACE_ADMIN}) @Post("/update") @Override + @AuditLogging(provider = "updatePermission") public void updatePermission(@Body final PermissionUpdate permissionUpdate) { ApiHelper.execute(() -> { validatePermissionUpdate(permissionUpdate); @@ -97,6 +100,7 @@ private void validatePermissionUpdate(@Body final PermissionUpdate permissionUpd @Secured({ORGANIZATION_ADMIN, WORKSPACE_ADMIN}) @Post("/delete") @Override + @AuditLogging(provider = "deletePermission") public void deletePermission(@Body final PermissionIdRequestBody permissionIdRequestBody) { ApiHelper.execute(() -> { diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/SecretsPersistenceConfigApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SecretsPersistenceConfigApiController.java index 3dd9377d7ec..8f89540c257 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SecretsPersistenceConfigApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/SecretsPersistenceConfigApiController.java @@ -28,12 +28,10 @@ import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; import java.util.Objects; -import lombok.extern.slf4j.Slf4j; @SuppressWarnings("PMD.PreserveStackTrace") @Controller("/api/v1/secrets_persistence_config") @Secured(SecurityRule.IS_AUTHENTICATED) -@Slf4j public class SecretsPersistenceConfigApiController implements SecretsPersistenceConfigApi { private final WorkspaceService workspaceService; diff --git 
a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java index 481f1562e8c..8547aef3714 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/SourceDefinitionApiController.java @@ -37,14 +37,17 @@ import io.micronaut.scheduling.annotation.ExecuteOn; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; -import lombok.extern.slf4j.Slf4j; +import java.lang.invoke.MethodHandles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Controller("/api/v1/source_definitions") @Context @Secured(SecurityRule.IS_AUTHENTICATED) -@Slf4j public class SourceDefinitionApiController implements SourceDefinitionApi { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final SourceDefinitionsHandler sourceDefinitionsHandler; private final EnterpriseSourceStubsHandler enterpriseSourceStubsHandler; private final ActorDefinitionAccessValidator accessValidator; diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java index a82cc132a8f..fd54c1b2b4a 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/UserInvitationApiController.java @@ -30,9 +30,7 @@ import jakarta.ws.rs.PathParam; import java.util.List; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; -@Slf4j @Controller("/api/v1/user_invitations") @Secured(SecurityRule.IS_AUTHENTICATED) public class UserInvitationApiController implements UserInvitationApi { diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java index b147cc3a32a..32b32fdf519 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/WebBackendApiController.java @@ -24,6 +24,8 @@ import io.airbyte.api.model.generated.WebBackendCronExpressionDescription; import io.airbyte.api.model.generated.WebBackendDescribeCronExpressionRequestBody; import io.airbyte.api.model.generated.WebBackendGeographiesListResult; +import io.airbyte.api.model.generated.WebBackendValidateMappersRequestBody; +import io.airbyte.api.model.generated.WebBackendValidateMappersResponse; import io.airbyte.api.model.generated.WebBackendWorkspaceState; import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; import io.airbyte.commons.lang.MoreBooleans; @@ -33,8 +35,10 @@ import io.airbyte.commons.server.handlers.WebBackendConnectionsHandler; import io.airbyte.commons.server.handlers.WebBackendGeographiesHandler; import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors; +import io.airbyte.commons.server.support.CurrentUserService; import io.airbyte.metrics.lib.TracingHelper; import io.airbyte.server.handlers.WebBackendCronExpressionHandler; +import io.airbyte.server.handlers.WebBackendMappersHandler; import io.micronaut.http.annotation.Body; import io.micronaut.http.annotation.Controller; import io.micronaut.http.annotation.Post; @@ -51,18 +55,24 @@ public class WebBackendApiController implements WebBackendApi { private final 
WebBackendGeographiesHandler webBackendGeographiesHandler; private final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler; private final WebBackendCronExpressionHandler webBackendCronExpressionHandler; + private final WebBackendMappersHandler webBackendMappersHandler; private final ApiAuthorizationHelper apiAuthorizationHelper; + private final CurrentUserService currentUserService; public WebBackendApiController(final WebBackendConnectionsHandler webBackendConnectionsHandler, final WebBackendGeographiesHandler webBackendGeographiesHandler, final WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler, final WebBackendCronExpressionHandler webBackendCronExpressionHandler, - final ApiAuthorizationHelper apiAuthorizationHelper) { + final WebBackendMappersHandler webBackendMappersHandler, + final ApiAuthorizationHelper apiAuthorizationHelper, + final CurrentUserService currentUserService) { this.webBackendConnectionsHandler = webBackendConnectionsHandler; this.webBackendGeographiesHandler = webBackendGeographiesHandler; this.webBackendCheckUpdatesHandler = webBackendCheckUpdatesHandler; this.webBackendCronExpressionHandler = webBackendCronExpressionHandler; + this.webBackendMappersHandler = webBackendMappersHandler; this.apiAuthorizationHelper = apiAuthorizationHelper; + this.currentUserService = currentUserService; } @Post("/state/get_type") @@ -105,9 +115,10 @@ public WebBackendConnectionRead webBackendGetConnection(@Body final WebBackendCo if (MoreBooleans.isTruthy(webBackendConnectionRequestBody.getWithRefreshedCatalog())) { // only allow refresh catalog if the user is at least a workspace editor or // organization editor for the connection's workspace - apiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacesPermissions( webBackendConnectionRequestBody.getConnectionId().toString(), Scope.CONNECTION, + currentUserService.getCurrentUser().getUserId(), Set.of(PermissionType.WORKSPACE_EDITOR, PermissionType.ORGANIZATION_EDITOR)); } return webBackendConnectionsHandler.webBackendGetConnection(webBackendConnectionRequestBody); @@ -156,6 +167,15 @@ public WebBackendConnectionRead webBackendUpdateConnection(@Body final WebBacken }); } + @SuppressWarnings("LineLength") + @Post("/connections/mappers/validate") + @Secured({WORKSPACE_EDITOR, ORGANIZATION_EDITOR}) + @ExecuteOn(AirbyteTaskExecutors.IO) + @Override + public WebBackendValidateMappersResponse webBackendValidateMappers(@Body final WebBackendValidateMappersRequestBody webBackendValidateMappersRequestBody) { + return ApiHelper.execute(() -> webBackendMappersHandler.validateMappers(webBackendValidateMappersRequestBody)); + } + @Post("/describe_cron_expression") @Secured({AUTHENTICATED_USER}) @ExecuteOn(AirbyteTaskExecutors.IO) diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/AnalyticsTrackingBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/AnalyticsTrackingBeanFactory.java index 2630779abea..dc16eb3f2d5 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/AnalyticsTrackingBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/AnalyticsTrackingBeanFactory.java @@ -67,7 +67,8 @@ public Function workspaceFetcher(final WorkspaceService wor workspace.getFeedbackDone(), Enums.convertTo(workspace.getDefaultGeography(), Geography.class), null, - workspace.getTombstone()); + workspace.getTombstone(), + null); } catch (final ConfigNotFoundException | JsonValidationException | IOException e) { // No longer throwing a runtime 
exception so that we can support the Airbyte API. return new WorkspaceRead( @@ -88,6 +89,7 @@ public Function workspaceFetcher(final WorkspaceService wor null, null, null, + null, null); } }; diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java index fb6b586251a..589d60718dd 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/ApplicationBeanFactory.java @@ -30,6 +30,7 @@ import io.airbyte.metrics.lib.MetricClient; import io.airbyte.metrics.lib.MetricClientFactory; import io.airbyte.metrics.lib.MetricEmittingApps; +import io.airbyte.oauth.OAuthImplementationFactory; import io.airbyte.persistence.job.DefaultJobCreator; import io.airbyte.persistence.job.JobNotifier; import io.airbyte.persistence.job.JobPersistence; @@ -219,6 +220,12 @@ public HttpClient httpClient() { return HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); } + @Singleton + @Named("oauthImplementationFactory") + public OAuthImplementationFactory oauthImplementationFactory() { + return new OAuthImplementationFactory(HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build()); + } + @Singleton public BuilderProjectUpdater builderProjectUpdater(final ConnectorBuilderService connectorBuilderService) { final var pathToConnectors = io.airbyte.commons.envvar.EnvVar.PATH_TO_CONNECTORS.fetch(); diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java index 992f786761c..69daf9b5f59 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java @@ -27,22 +27,24 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import javax.sql.DataSource; -import lombok.extern.slf4j.Slf4j; import org.flywaydb.core.Flyway; import org.jooq.DSLContext; import org.jooq.SQLDialect; import org.jooq.impl.DataSourceConnectionProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Micronaut bean factory for database-related singletons. 
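On the new @Named("oauthImplementationFactory") singleton registered in ApplicationBeanFactory above: any Micronaut bean can take it by constructor injection using the qualifier. A hypothetical consumer sketch (the service name is invented for illustration):

    import io.airbyte.oauth.OAuthImplementationFactory;
    import jakarta.inject.Named;
    import jakarta.inject.Singleton;

    @Singleton
    class SomeOAuthConsumingService {
      private final OAuthImplementationFactory oauthImplementationFactory;

      SomeOAuthConsumingService(@Named("oauthImplementationFactory") final OAuthImplementationFactory oauthImplementationFactory) {
        this.oauthImplementationFactory = oauthImplementationFactory;
      }
    }
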
*/ @Factory -@Slf4j - @SuppressWarnings({"PMD.AvoidDuplicateLiterals", "LineLength"}) public class DatabaseBeanFactory { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String BASELINE_DESCRIPTION = "Baseline from file-based migration v1"; private static final Boolean BASELINE_ON_MIGRATION = true; private static final String INSTALLED_BY = "ServerApp"; diff --git a/airbyte-server/src/main/java/io/airbyte/server/handlers/UserInvitationHandler.java b/airbyte-server/src/main/java/io/airbyte/server/handlers/UserInvitationHandler.java index 5416d153b85..ff16fb559bb 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/handlers/UserInvitationHandler.java +++ b/airbyte-server/src/main/java/io/airbyte/server/handlers/UserInvitationHandler.java @@ -40,19 +40,22 @@ import io.airbyte.validation.json.JsonValidationException; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.time.OffsetDateTime; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; @Singleton -@Slf4j @SuppressWarnings({"PMD.PreserveStackTrace", "PMD.ExceptionAsFlowControl"}) public class UserInvitationHandler { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + static final String ACCEPT_INVITE_PATH = "/accept-invite?inviteCode="; static final int INVITE_EXPIRATION_DAYS = 7; static final String USER_INVITED = "User Invited"; diff --git a/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java b/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java index c2f92bf2af9..728de201d39 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java +++ b/airbyte-server/src/main/java/io/airbyte/server/pro/AirbyteAuthInternalTokenValidator.java @@ -13,7 +13,6 @@ import io.micronaut.security.authentication.Authentication; import io.micronaut.security.token.validator.TokenValidator; import jakarta.inject.Singleton; -import lombok.extern.slf4j.Slf4j; import org.reactivestreams.Publisher; import reactor.core.publisher.Flux; @@ -24,7 +23,6 @@ * is making the internal request. The webapp proxy unsets the X-Airbyte-Auth header, so this header * will only be present on internal requests. 
**/ -@Slf4j @Singleton @Requires(property = "micronaut.security.enabled", value = "true") diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt index 95c167e3c5d..dc98e0e18e1 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt @@ -5,8 +5,8 @@ import io.airbyte.api.generated.BillingApi import io.airbyte.api.model.generated.CustomerPortalRead import io.airbyte.api.model.generated.CustomerPortalRequestBody import io.airbyte.api.model.generated.ListInvoicesRead -import io.airbyte.api.model.generated.OrganizationBalanceRead import io.airbyte.api.model.generated.OrganizationIdRequestBody +import io.airbyte.api.model.generated.OrganizationSubscriptionInfoRead import io.airbyte.api.model.generated.OrganizationTrialStatusRead import io.airbyte.api.model.generated.PaymentInformationRead import io.airbyte.api.problems.throwable.generated.ApiNotImplementedInOssProblem @@ -42,11 +42,11 @@ open class BillingController : BillingApi { ): PaymentInformationRead = throw ApiNotImplementedInOssProblem() @RequiresIntent(Intent.ManageOrganizationBilling) - @Post("/organization_balance") + @Post("/subscription_info") @ExecuteOn(AirbyteTaskExecutors.IO) - override fun getOrganizationBalance( + override fun getSubscriptionInfo( @Body organizationIdRequestBody: OrganizationIdRequestBody, - ): OrganizationBalanceRead = throw ApiNotImplementedInOssProblem() + ): OrganizationSubscriptionInfoRead = throw ApiNotImplementedInOssProblem() @Post("/handle_webhook") @ExecuteOn(AirbyteTaskExecutors.WEBHOOK) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/DataplaneController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/DataplaneController.kt new file mode 100644 index 00000000000..0a6e2d5e31e --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/DataplaneController.kt @@ -0,0 +1,22 @@ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.generated.DataplaneApi +import io.airbyte.api.model.generated.DataplaneGetIdRequestBody +import io.airbyte.api.model.generated.DataplaneRead +import io.airbyte.server.services.DataplaneService +import io.micronaut.http.annotation.Controller + +@Controller("/api/v1/dataplanes") +class DataplaneController( + private val dataplaneService: DataplaneService, +) : DataplaneApi { + override fun getDataplaneId(dataplaneGetIdRequestBody: DataplaneGetIdRequestBody): DataplaneRead { + val connectionId = dataplaneGetIdRequestBody.connectionId + val actorType = dataplaneGetIdRequestBody.actorType + val actorId = dataplaneGetIdRequestBody.actorId + val workspaceId = dataplaneGetIdRequestBody.workspaceId + val queueName = dataplaneService.getQueueName(connectionId, actorType, actorId, workspaceId, dataplaneGetIdRequestBody.workloadPriority) + + return DataplaneRead().id(queueName) + } +} diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigController.kt index 85637381ac1..a34730cda7f 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigController.kt +++ 
b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigController.kt @@ -3,18 +3,16 @@ package io.airbyte.server.apis.controllers import io.airbyte.api.generated.OrganizationPaymentConfigApi import io.airbyte.api.model.generated.OrganizationPaymentConfigRead import io.airbyte.api.problems.ResourceType -import io.airbyte.api.problems.model.generated.ProblemMessageData import io.airbyte.api.problems.model.generated.ProblemResourceData import io.airbyte.api.problems.throwable.generated.ResourceNotFoundProblem -import io.airbyte.api.problems.throwable.generated.StateConflictProblem import io.airbyte.commons.auth.generated.Intent import io.airbyte.commons.auth.permissions.RequiresIntent +import io.airbyte.commons.server.OrganizationId import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors +import io.airbyte.commons.server.services.OrganizationService import io.airbyte.config.OrganizationPaymentConfig import io.airbyte.config.OrganizationPaymentConfig.PaymentStatus import io.airbyte.config.OrganizationPaymentConfig.UsageCategoryOverride -import io.airbyte.data.services.OrganizationPaymentConfigService -import io.airbyte.data.services.OrganizationService import io.micronaut.http.HttpStatus import io.micronaut.http.annotation.Body import io.micronaut.http.annotation.Controller @@ -28,13 +26,16 @@ import java.time.Instant import java.time.OffsetDateTime import java.time.ZoneId import java.util.UUID +import io.airbyte.data.services.OrganizationPaymentConfigService as OrganizationPaymentConfigRepository +import io.airbyte.data.services.OrganizationService as OrganizationRepository private val UTC = ZoneId.of("UTC") @Controller("/api/v1/organization_payment_config") open class OrganizationPaymentConfigController( - private val organizationPaymentConfigService: OrganizationPaymentConfigService, private val organizationService: OrganizationService, + private val organizationPaymentConfigRepository: OrganizationPaymentConfigRepository, + private val organizationRepository: OrganizationRepository, ) : OrganizationPaymentConfigApi { @RequiresIntent(Intent.ManageOrganizationPaymentConfigs) @Get("/{organizationId}") @@ -42,7 +43,7 @@ open class OrganizationPaymentConfigController( override fun getOrganizationPaymentConfig( @PathVariable("organizationId") organizationId: UUID, ): OrganizationPaymentConfigRead = - organizationPaymentConfigService.findByOrganizationId(organizationId)?.toApiModel() + organizationPaymentConfigRepository.findByOrganizationId(organizationId)?.toApiModel() ?: throw ResourceNotFoundProblem( ProblemResourceData().resourceId(organizationId.toString()).resourceType(ResourceType.ORGANIZATION_PAYMENT_CONFIG), ) @@ -54,12 +55,12 @@ open class OrganizationPaymentConfigController( override fun deleteOrganizationPaymentConfig( @PathVariable("organizationId") organizationId: UUID, ) { - if (organizationPaymentConfigService.findByOrganizationId(organizationId) == null) { + if (organizationPaymentConfigRepository.findByOrganizationId(organizationId) == null) { throw ResourceNotFoundProblem( ProblemResourceData().resourceId(organizationId.toString()).resourceType(ResourceType.ORGANIZATION_PAYMENT_CONFIG), ) } - organizationPaymentConfigService.deletePaymentConfig(organizationId) + organizationPaymentConfigRepository.deletePaymentConfig(organizationId) } @RequiresIntent(Intent.ManageOrganizationPaymentConfigs) @@ -68,25 +69,7 @@ open class OrganizationPaymentConfigController( override fun endGracePeriod( @PathVariable("organizationId") 
organizationId: UUID, ) { - val orgPaymentConfig = - organizationPaymentConfigService.findByOrganizationId(organizationId) ?: throw ResourceNotFoundProblem( - ProblemResourceData().resourceId(organizationId.toString()).resourceType(ResourceType.ORGANIZATION_PAYMENT_CONFIG), - ) - - if (orgPaymentConfig.paymentStatus != PaymentStatus.GRACE_PERIOD) { - throw StateConflictProblem( - ProblemMessageData().message( - "OrganizationPaymentConfig paymentStatus is ${orgPaymentConfig.paymentStatus}, but expected ${PaymentStatus.GRACE_PERIOD}", - ), - ) - } - - organizationPaymentConfigService.savePaymentConfig( - orgPaymentConfig.apply { - paymentStatus = PaymentStatus.DISABLED - gracePeriodEndAt = null - }, - ) + organizationService.handlePaymentGracePeriodEnded(OrganizationId(organizationId)) } @RequiresIntent(Intent.ManageOrganizationPaymentConfigs) @@ -96,10 +79,10 @@ open class OrganizationPaymentConfigController( @Body organizationPaymentConfigUpdateRequestBody: OrganizationPaymentConfigRead, ): OrganizationPaymentConfigRead { val orgId = organizationPaymentConfigUpdateRequestBody.organizationId - if (organizationService.getOrganization(orgId).isEmpty) { + if (organizationRepository.getOrganization(orgId).isEmpty) { throw ResourceNotFoundProblem(ProblemResourceData().resourceId(orgId.toString()).resourceType(ResourceType.ORGANIZATION)) } - organizationPaymentConfigService.savePaymentConfig(organizationPaymentConfigUpdateRequestBody.toConfigModel()) + organizationPaymentConfigRepository.savePaymentConfig(organizationPaymentConfigUpdateRequestBody.toConfigModel()) return getOrganizationPaymentConfig(orgId) } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt index df0d24216d7..bc7c35ec31e 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/ConnectionsController.kt @@ -57,8 +57,8 @@ open class ConnectionsController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicCreateConnection(connectionCreateRequest: ConnectionCreateRequest): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(connectionCreateRequest.destinationId.toString()), + apiAuthorizationHelper.checkWorkspacePermission( + connectionCreateRequest.destinationId.toString(), Scope.DESTINATION, userId, PermissionType.WORKSPACE_EDITOR, @@ -199,8 +199,8 @@ open class ConnectionsController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicDeleteConnection(connectionId: String): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(connectionId), + apiAuthorizationHelper.checkWorkspacePermission( + connectionId, Scope.CONNECTION, userId, PermissionType.WORKSPACE_EDITOR, @@ -232,8 +232,8 @@ open class ConnectionsController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicGetConnection(connectionId: String): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(connectionId), + apiAuthorizationHelper.checkWorkspacePermission( + connectionId, Scope.CONNECTION, userId, PermissionType.WORKSPACE_READER, @@ -264,7 +264,7 @@ open class ConnectionsController( offset: Int, ): 
Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacesPermission( workspaceIds?.let { workspaceIds.map { it.toString() } } ?: emptyList(), Scope.WORKSPACES, userId, @@ -300,8 +300,8 @@ open class ConnectionsController( @Valid @Body @NotNull connectionPatchRequest: ConnectionPatchRequest, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(connectionId), + apiAuthorizationHelper.checkWorkspacePermission( + connectionId, Scope.CONNECTION, userId, PermissionType.WORKSPACE_EDITOR, diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DestinationsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DestinationsController.kt index 848c265b87a..bb4cd265ba6 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DestinationsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/DestinationsController.kt @@ -54,8 +54,8 @@ open class DestinationsController( destinationCreateRequest?.let { request -> val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(destinationCreateRequest.workspaceId.toString()), + apiAuthorizationHelper.checkWorkspacePermission( + destinationCreateRequest.workspaceId.toString(), Scope.WORKSPACE, userId, PermissionType.WORKSPACE_EDITOR, @@ -115,8 +115,8 @@ open class DestinationsController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicDeleteDestination(destinationId: String): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(destinationId), + apiAuthorizationHelper.checkWorkspacePermission( + destinationId, Scope.DESTINATION, userId, PermissionType.WORKSPACE_EDITOR, @@ -147,8 +147,8 @@ open class DestinationsController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicGetDestination(destinationId: String): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(destinationId), + apiAuthorizationHelper.checkWorkspacePermission( + destinationId, Scope.DESTINATION, userId, PermissionType.WORKSPACE_READER, @@ -184,7 +184,7 @@ open class DestinationsController( offset: Int, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacesPermission( workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, @@ -220,8 +220,8 @@ open class DestinationsController( destinationPatchRequest: DestinationPatchRequest?, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(destinationId), + apiAuthorizationHelper.checkWorkspacePermission( + destinationId, Scope.DESTINATION, userId, PermissionType.WORKSPACE_EDITOR, @@ -258,8 +258,8 @@ open class DestinationsController( destinationPutRequest: DestinationPutRequest?, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(destinationId), + apiAuthorizationHelper.checkWorkspacePermission( + destinationId, Scope.DESTINATION, userId, PermissionType.WORKSPACE_EDITOR, diff --git 
a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/JobsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/JobsController.kt index 5b990937d4c..46f51a3ef06 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/JobsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/JobsController.kt @@ -56,8 +56,8 @@ open class JobsController( @PathParam("jobId") jobId: Long, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(jobId.toString()), + apiAuthorizationHelper.checkWorkspacePermission( + jobId.toString(), Scope.JOB, userId, PermissionType.WORKSPACE_RUNNER, @@ -92,15 +92,15 @@ open class JobsController( // Only Editor and above should be able to run a Clear. when (jobCreateRequest.jobType) { JobTypeEnum.CLEAR -> - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(jobCreateRequest.connectionId), + apiAuthorizationHelper.checkWorkspacePermission( + jobCreateRequest.connectionId, Scope.CONNECTION, userId, PermissionType.WORKSPACE_EDITOR, ) else -> - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(jobCreateRequest.connectionId), + apiAuthorizationHelper.checkWorkspacePermission( + jobCreateRequest.connectionId, Scope.CONNECTION, userId, PermissionType.WORKSPACE_RUNNER, @@ -209,8 +209,8 @@ open class JobsController( @PathParam("jobId") jobId: Long, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(jobId.toString()), + apiAuthorizationHelper.checkWorkspacePermission( + jobId.toString(), Scope.JOB, userId, PermissionType.WORKSPACE_READER, @@ -255,14 +255,14 @@ open class JobsController( ): Response { val userId: UUID = currentUserService.currentUser.userId if (connectionId != null) { - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(connectionId), + apiAuthorizationHelper.checkWorkspacePermission( + connectionId, Scope.CONNECTION, userId, PermissionType.WORKSPACE_READER, ) } else { - apiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacesPermission( workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/SourcesController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/SourcesController.kt index 08fcd5d5baf..4e0ae0d5190 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/SourcesController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/SourcesController.kt @@ -52,7 +52,7 @@ open class SourcesController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicCreateSource(sourceCreateRequest: SourceCreateRequest?): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacesPermission( sourceCreateRequest?.let { listOf(it.workspaceId.toString()) } ?: emptyList(), Scope.WORKSPACE, userId, @@ -114,8 +114,8 @@ open class SourcesController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicDeleteSource(sourceId: String): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(sourceId), + apiAuthorizationHelper.checkWorkspacePermission( + 
sourceId, Scope.SOURCE, userId, PermissionType.WORKSPACE_EDITOR, @@ -147,8 +147,8 @@ open class SourcesController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicGetSource(sourceId: String): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(sourceId), + apiAuthorizationHelper.checkWorkspacePermission( + sourceId, Scope.SOURCE, userId, PermissionType.WORKSPACE_READER, @@ -180,8 +180,8 @@ open class SourcesController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun initiateOAuth(initiateOauthRequest: InitiateOauthRequest): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(initiateOauthRequest.workspaceId.toString()), + apiAuthorizationHelper.checkWorkspacePermission( + initiateOauthRequest.workspaceId.toString(), Scope.WORKSPACE, userId, PermissionType.WORKSPACE_EDITOR, @@ -197,7 +197,7 @@ open class SourcesController( offset: Int, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacesPermission( workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, @@ -234,8 +234,8 @@ open class SourcesController( sourcePatchRequest: SourcePatchRequest?, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(sourceId), + apiAuthorizationHelper.checkWorkspacePermission( + sourceId, Scope.SOURCE, userId, PermissionType.WORKSPACE_EDITOR, @@ -276,8 +276,8 @@ open class SourcesController( sourcePutRequest: SourcePutRequest?, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(sourceId), + apiAuthorizationHelper.checkWorkspacePermission( + sourceId, Scope.SOURCE, userId, PermissionType.WORKSPACE_EDITOR, diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/StreamsController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/StreamsController.kt index efaafffd7e4..d2ef7ea6497 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/StreamsController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/StreamsController.kt @@ -56,15 +56,15 @@ class StreamsController( ): Response { // Check permission for source and destination val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(sourceId), + apiAuthorizationHelper.checkWorkspacePermission( + sourceId, Scope.SOURCE, userId, PermissionType.WORKSPACE_READER, ) destinationId?.apply { - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(destinationId), + apiAuthorizationHelper.checkWorkspacePermission( + destinationId, Scope.DESTINATION, userId, PermissionType.WORKSPACE_READER, @@ -98,7 +98,9 @@ class StreamsController( emptyList() } val streamList = - httpResponse.catalog!!.streams.stream() + httpResponse.catalog!! 
+ .streams + .stream() .map { obj: AirbyteStreamAndConfiguration -> obj.stream } .toList() val listOfStreamProperties = @@ -120,8 +122,7 @@ class StreamsController( return Response .status( HttpStatus.OK.code, - ) - .entity(listOfStreamProperties) + ).entity(listOfStreamProperties) .build() } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt index 73bcb91f710..da00b1e5b88 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/controllers/WorkspacesController.kt @@ -44,8 +44,8 @@ open class WorkspacesController( workspaceOAuthCredentialsRequest: WorkspaceOAuthCredentialsRequest, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(workspaceId), + apiAuthorizationHelper.checkWorkspacePermission( + workspaceId, Scope.WORKSPACE, userId, PermissionType.WORKSPACE_EDITOR, @@ -71,8 +71,8 @@ open class WorkspacesController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicDeleteWorkspace(workspaceId: String): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(workspaceId), + apiAuthorizationHelper.checkWorkspacePermission( + workspaceId, Scope.WORKSPACE, userId, PermissionType.WORKSPACE_EDITOR, @@ -84,8 +84,8 @@ open class WorkspacesController( @ExecuteOn(AirbyteTaskExecutors.PUBLIC_API) override fun publicGetWorkspace(workspaceId: String): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(workspaceId), + apiAuthorizationHelper.checkWorkspacePermission( + workspaceId, Scope.WORKSPACE, userId, PermissionType.WORKSPACE_READER, @@ -102,7 +102,7 @@ open class WorkspacesController( ): Response { val userId: UUID = currentUserService.currentUser.userId logger.debug { "listing workspaces: $workspaceIds" } - apiAuthorizationHelper.checkWorkspacePermissions( + apiAuthorizationHelper.checkWorkspacesPermission( workspaceIds?.map { it.toString() } ?: emptyList(), Scope.WORKSPACES, userId, @@ -124,8 +124,8 @@ open class WorkspacesController( workspaceUpdateRequest: WorkspaceUpdateRequest, ): Response { val userId: UUID = currentUserService.currentUser.userId - apiAuthorizationHelper.checkWorkspacePermissions( - listOf(workspaceId), + apiAuthorizationHelper.checkWorkspacePermission( + workspaceId, Scope.WORKSPACE, userId, PermissionType.WORKSPACE_EDITOR, diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt index f762aab4b26..f69f3f4a422 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/helpers/AirbyteCatalogHelper.kt @@ -313,6 +313,7 @@ object AirbyteCatalogHelper { private fun configuredMapperConverter(publicApiMappers: ConfiguredStreamMapper): io.airbyte.api.model.generated.ConfiguredStreamMapper = io.airbyte.api.model.generated.ConfiguredStreamMapper().apply { + id = publicApiMappers.id type = mapperTypeConverter(publicApiMappers.type) mapperConfiguration = publicApiMappers.mapperConfiguration } 
diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionReadMapper.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionReadMapper.kt index 8d1b7ce4b81..1c086c79c5f 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionReadMapper.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/publicapi/mappers/ConnectionReadMapper.kt @@ -133,6 +133,7 @@ object ConnectionReadMapper { private fun convertMappers(mappers: List?): List? { return mappers?.map { mapper -> io.airbyte.publicApi.server.generated.models.ConfiguredStreamMapper( + id = mapper.id, type = StreamMapperType.decode(mapper.type.toString()) ?: throw IllegalArgumentException("Invalid stream mapper type"), mapperConfiguration = mapper.mapperConfiguration, ) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/handlers/WebBackendCronExpressionHandler.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/handlers/WebBackendCronExpressionHandler.kt index 6f98010e580..ec1e5afe887 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/handlers/WebBackendCronExpressionHandler.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/handlers/WebBackendCronExpressionHandler.kt @@ -1,44 +1,30 @@ package io.airbyte.server.handlers -import com.cronutils.descriptor.CronDescriptor -import com.cronutils.model.CronType -import com.cronutils.model.definition.CronDefinitionBuilder -import com.cronutils.model.time.ExecutionTime -import com.cronutils.parser.CronParser import io.airbyte.api.model.generated.WebBackendCronExpressionDescription import io.airbyte.api.model.generated.WebBackendDescribeCronExpressionRequestBody import io.airbyte.api.problems.model.generated.ProblemCronExpressionData import io.airbyte.api.problems.throwable.generated.CronValidationInvalidExpressionProblem +import io.airbyte.commons.server.helpers.CronExpressionHelper import jakarta.inject.Singleton -import java.time.ZonedDateTime -import java.util.Locale @Singleton -class WebBackendCronExpressionHandler { +open class WebBackendCronExpressionHandler( + private val cronExpressionHelper: CronExpressionHelper, +) { fun describeCronExpression(body: WebBackendDescribeCronExpressionRequestBody): WebBackendCronExpressionDescription? 
{ - val cronDefinition = CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ) - try { - val cron = CronParser(cronDefinition).parse(body.cronExpression) - cron.validate() - - val description = CronDescriptor.instance(Locale.ENGLISH).describe(cron) - - val executionTime = ExecutionTime.forCron(cron) - val nextExecutions = mutableListOf() - var nextExecution = ZonedDateTime.now() - - for (i in 1..3) { - nextExecution = executionTime.nextExecution(nextExecution).orElse(null) ?: break - nextExecutions.add(nextExecution.toEpochSecond()) - } + val cron = cronExpressionHelper.validateCronExpression(body.cronExpression) return WebBackendCronExpressionDescription() .cronExpression(body.cronExpression) - .description(description) - .nextExecutions(nextExecutions) + .description(cronExpressionHelper.describeCronExpression(cron)) + .nextExecutions(cronExpressionHelper.getNextExecutions(cron, 3)) } catch (e: IllegalArgumentException) { - throw CronValidationInvalidExpressionProblem(ProblemCronExpressionData().cronExpression(body.cronExpression).validationErrorMessage(e.message)) + throw CronValidationInvalidExpressionProblem( + ProblemCronExpressionData() + .cronExpression(body.cronExpression) + .validationErrorMessage(e.message), + ) } } } diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/handlers/WebBackendMappersHandler.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/handlers/WebBackendMappersHandler.kt new file mode 100644 index 00000000000..32b9f2fe133 --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/handlers/WebBackendMappersHandler.kt @@ -0,0 +1,117 @@ +package io.airbyte.server.handlers + +import io.airbyte.api.model.generated.ConnectionIdRequestBody +import io.airbyte.api.model.generated.FieldSpec +import io.airbyte.api.model.generated.MapperValidationError +import io.airbyte.api.model.generated.MapperValidationErrorType +import io.airbyte.api.model.generated.MapperValidationResult +import io.airbyte.api.model.generated.WebBackendValidateMappersRequestBody +import io.airbyte.api.model.generated.WebBackendValidateMappersResponse +import io.airbyte.commons.server.handlers.ConnectionsHandler +import io.airbyte.commons.server.handlers.helpers.CatalogConverter +import io.airbyte.config.ConfiguredAirbyteCatalog +import io.airbyte.config.FieldType +import io.airbyte.config.MapperConfig +import io.airbyte.mappers.transformations.DestinationCatalogGenerator +import jakarta.inject.Singleton + +/** + * The web backend is an abstraction that allows the frontend to structure data in such a way that + * it is easier for a react frontend to consume. It should NOT have direct access to the database. + * It should operate exclusively by calling other endpoints that are exposed in the API. + */ +@Singleton +class WebBackendMappersHandler( + private val connectionsHandler: ConnectionsHandler, + private val catalogConverter: CatalogConverter, + private val destinationCatalogGenerator: DestinationCatalogGenerator, +) { + /** + * Progressively validate mappers and get resulting fields. + * Mappers are applied one by one to the stream to get the list of fields available for the next mapper. 
+ */ + fun validateMappers(validateMappersRequest: WebBackendValidateMappersRequestBody): WebBackendValidateMappersResponse { + val connectionIdRequestBody = ConnectionIdRequestBody().connectionId(validateMappersRequest.connectionId) + val connection = connectionsHandler.getConnection(connectionIdRequestBody.connectionId) + val configuredCatalog = catalogConverter.toConfiguredInternal(connection.syncCatalog) + + val stream = + configuredCatalog.streams.first { + it.stream.name == validateMappersRequest.streamDescriptor.name && + it.stream.namespace == validateMappersRequest.streamDescriptor.namespace + } + val initialFields = + stream.fields!!.map { + FieldSpec() + .name(it.name) + .type(convertFieldType(it.type)) + }.toList() + + val partialMappers = mutableListOf() + stream.mappers = partialMappers + + val mapperValidationResults = mutableListOf() + val newMappers = catalogConverter.toConfiguredMappers(validateMappersRequest.mappers) + + // Trim down the catalog so we only process mappers for the stream we're working with + val slimCatalog = ConfiguredAirbyteCatalog(listOf(stream)) + + for (mapper in newMappers) { + partialMappers.add(mapper) + + val generationResult = destinationCatalogGenerator.generateDestinationCatalog(slimCatalog) + val newStream = generationResult.catalog.streams.first() + + val validateRes = MapperValidationResult() + validateRes.id = mapper.id() + validateRes.outputFields = + newStream.fields!!.map { + FieldSpec() + .name(it.name) + .type(convertFieldType(it.type)) + }.toList() + + val streamErrors = generationResult.errors.entries.firstOrNull()?.value + val mapperError = streamErrors?.get(mapper) + if (mapperError != null) { + validateRes.validationError = + MapperValidationError() + .type(convertMapperErrorType(mapperError.type)) + .message(mapperError.message) + } + + mapperValidationResults.add(validateRes) + } + + return WebBackendValidateMappersResponse() + .initialFields(initialFields) + .mappers(mapperValidationResults) + } + + private fun convertMapperErrorType(mapperErrorType: DestinationCatalogGenerator.MapperErrorType): MapperValidationErrorType { + return when (mapperErrorType) { + DestinationCatalogGenerator.MapperErrorType.MISSING_MAPPER -> MapperValidationErrorType.MISSING_MAPPER + DestinationCatalogGenerator.MapperErrorType.INVALID_MAPPER_CONFIG -> MapperValidationErrorType.INVALID_MAPPER_CONFIG + DestinationCatalogGenerator.MapperErrorType.FIELD_NOT_FOUND -> MapperValidationErrorType.FIELD_NOT_FOUND + DestinationCatalogGenerator.MapperErrorType.FIELD_ALREADY_EXISTS -> MapperValidationErrorType.FIELD_ALREADY_EXISTS + } + } + + private fun convertFieldType(fieldType: FieldType): FieldSpec.TypeEnum { + return when (fieldType) { + FieldType.STRING -> FieldSpec.TypeEnum.STRING + FieldType.BOOLEAN -> FieldSpec.TypeEnum.BOOLEAN + FieldType.DATE -> FieldSpec.TypeEnum.DATE + FieldType.TIMESTAMP_WITHOUT_TIMEZONE -> FieldSpec.TypeEnum.TIMESTAMP_WITHOUT_TIMEZONE + FieldType.TIMESTAMP_WITH_TIMEZONE -> FieldSpec.TypeEnum.TIMESTAMP_WITH_TIMEZONE + FieldType.TIME_WITHOUT_TIMEZONE -> FieldSpec.TypeEnum.TIME_WITHOUT_TIMEZONE + FieldType.TIME_WITH_TIMEZONE -> FieldSpec.TypeEnum.TIME_WITH_TIMEZONE + FieldType.INTEGER -> FieldSpec.TypeEnum.INTEGER + FieldType.NUMBER -> FieldSpec.TypeEnum.NUMBER + FieldType.ARRAY -> FieldSpec.TypeEnum.ARRAY + FieldType.OBJECT -> FieldSpec.TypeEnum.OBJECT + FieldType.MULTI -> FieldSpec.TypeEnum.MULTI + FieldType.UNKNOWN -> FieldSpec.TypeEnum.UNKNOWN + } + } +} diff --git 
a/airbyte-server/src/main/kotlin/io/airbyte/server/services/DataplaneService.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/services/DataplaneService.kt new file mode 100644 index 00000000000..21d0a7f00f0 --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/services/DataplaneService.kt @@ -0,0 +1,146 @@ +package io.airbyte.server.services + +import io.airbyte.api.model.generated.ActorType +import io.airbyte.api.model.generated.WorkloadPriority +import io.airbyte.api.problems.model.generated.ProblemMessageData +import io.airbyte.api.problems.throwable.generated.BadRequestProblem +import io.airbyte.config.ConfigScopeType +import io.airbyte.config.Geography +import io.airbyte.config.StandardSync +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.DestinationService +import io.airbyte.data.services.ScopedConfigurationService +import io.airbyte.data.services.SourceService +import io.airbyte.data.services.WorkspaceService +import io.airbyte.data.services.shared.NetworkSecurityTokenKey +import io.airbyte.featureflag.CloudProvider +import io.airbyte.featureflag.CloudProviderRegion +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.GeographicRegion +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.Priority +import io.airbyte.featureflag.Priority.Companion.HIGH_PRIORITY +import io.airbyte.featureflag.WorkloadApiRouting +import io.airbyte.featureflag.Workspace +import jakarta.inject.Singleton +import jakarta.validation.constraints.NotNull +import java.util.UUID + +@Singleton +class DataplaneService( + private val connectionService: ConnectionService, + private val workspaceService: WorkspaceService, + private val sourceService: SourceService, + private val destinationService: DestinationService, + private val featureFlagClient: FeatureFlagClient, + private val scopedConfigurationService: ScopedConfigurationService, +) { + private fun resolveWorkspaceId( + connection: StandardSync?, + actorType: ActorType?, + actorId: UUID?, + ): UUID { + return connection?.let { + destinationService.getDestinationConnection(connection.destinationId).workspaceId + } ?: actorType?.let { + when (actorType) { + ActorType.SOURCE -> sourceService.getSourceConnection(actorId).workspaceId + ActorType.DESTINATION -> destinationService.getDestinationConnection(actorId).workspaceId + else -> null + } + } ?: run { + throw BadRequestProblem( + ProblemMessageData().message( + "Unable to resolve workspace id for connection [${connection?.connectionId}], actor [${actorType?.name}], actorId [$actorId]", + ), + ) + } + } + + /** + * Given a connectionId and workspaceId, attempt to resolve geography. + */ + private fun getGeography( + connection: StandardSync?, + workspaceId: UUID?, + ): Geography { + try { + return connection?.let { + connection.geography + } ?: workspaceId?.let { + workspaceService.getGeographyForWorkspace(workspaceId) + } ?: Geography.AUTO + } catch (e: Exception) { + throw BadRequestProblem(ProblemMessageData().message("Unable to find geography of for connection [$connection], workspace [$workspaceId]")) + } + } + + /** + * Get queue name from given data. Pulled from the WorkloadService. 
+ */ + fun getQueueName( + connectionId: UUID?, + actorType: ActorType?, + actorId: UUID?, + workspaceId: UUID?, + priority: @NotNull WorkloadPriority, + ): String { + val connection = connectionId?.let { connectionService.getStandardSync(connectionId) } + val resolvedWorkspaceId = workspaceId ?: resolveWorkspaceId(connection, actorType, actorId) + val geography = getGeography(connection, resolvedWorkspaceId) + + getQueueWithScopedConfig(resolvedWorkspaceId, connectionId, geography)?.let { + return it + } + + val context = mutableListOf(io.airbyte.featureflag.Geography(geography.toString()), Workspace(resolvedWorkspaceId)) + if (WorkloadPriority.HIGH == priority) { + context.add(Priority(HIGH_PRIORITY)) + } + connectionId?.let { + context.add(Connection(it)) + } + + return featureFlagClient.stringVariation(WorkloadApiRouting, Multi(context)) + } + + private fun getQueueWithScopedConfig( + workspaceId: UUID, + connectionId: UUID?, + geography: Geography, + ): String? { + val scopedConfigs = + scopedConfigurationService.getScopedConfigurations( + NetworkSecurityTokenKey, + mapOf(ConfigScopeType.WORKSPACE to workspaceId), + ) + + // Very hardcoded for now + val context = + mutableListOf( + CloudProvider(CloudProvider.AWS), + GeographicRegion(geography.toGeographicRegion()), + Workspace(workspaceId.toString()), + CloudProviderRegion(CloudProviderRegion.AWS_US_EAST_1), + ) + + connectionId?.let { + context.add(Connection(it)) + } + + if (scopedConfigs.isNotEmpty()) { + return featureFlagClient.stringVariation(WorkloadApiRouting, Multi(context)) + } + + return null + } +} + +fun Geography.toGeographicRegion(): String { + return when (this) { + Geography.AUTO -> GeographicRegion.US + Geography.US -> GeographicRegion.US + Geography.EU -> GeographicRegion.EU + } +} diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index ce62beec19c..cdb3b43d265 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -12,6 +12,8 @@ micronaut: expire-after-access: 10m platform-compatibility-provider: expire-after-write: 15s + organization-customer-attributes: + expire-after-access: 1h env: cloud-deduction: true executors: @@ -40,7 +42,10 @@ micronaut: channels: enabled: false queues: - enabled: true + # Turning this on causes compatibility issues with Micronaut ClientCredentialsClient, + # which is used to add auth tokens when using keycloak for internal service auth. + # For more information, see https://github.com/airbytehq/airbyte-platform-internal/pull/14655 + enabled: false export: statsd: enabled: ${MICROMETER_METRICS_ENABLED:false} @@ -71,7 +76,7 @@ micronaut: access-logger: enabled: ${HTTP_ACCESS_LOG_ENABLED:false} aggregator: - max-content-length: 52428800 # 50MB + max-content-length: ${NETTY_AGGREGATOR_MAX_CONTENT_LENGTH:52428800} # 50MB max-header-size: ${NETTY_MAX_HEADER_SIZE:32768} max-request-size: 10485760 # 10MB the micronaut default. If we want to bump, double check it works for nginx. 
@@ -109,6 +114,7 @@ airbyte: state: ${STORAGE_BUCKET_STATE} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT} activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD} + audit-logging: ${STORAGE_BUCKET_AUDIT_LOGGING} azure: connection-string: ${AZURE_STORAGE_CONNECTION_STRING} gcs: @@ -130,6 +136,13 @@ airbyte: timeout-ms: ${CONNECTOR_REGISTRY_TIMEOUT_MS:30000} enterprise: enterprise-source-stubs-url: ${ENTERPRISE_SOURCE_STUBS_URL} + connector-rollout: + gcs: + application-credentials: ${GOOGLE_APPLICATION_CREDENTIALS:} + project-id: ${GCS_AIRBYTE_WAREHOUSE_EXPORTS_PROJECT_ID:} + bucket-name: ${GCS_AIRBYTE_WAREHOUSE_EXPORTS_BUCKET_NAME:} + object-prefix: ${GCS_DATA_SALES_CUSTOMER_ATTRIBUTES_OBJECT_PREFIX:} + deployment-mode: ${DEPLOYMENT_MODE:OSS} support-email-domains: oss: "" @@ -298,8 +311,10 @@ airbyte: server: connection: limits: - max-days: ${MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE:14} - max-jobs: ${MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE:20} + max-days-warning: ${MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_WARNING:4} + max-days: ${MAX_DAYS_OF_ONLY_FAILED_JOBS_BEFORE_CONNECTION_DISABLE:7} + max-jobs-warning: ${MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_WARNING:20} + max-jobs: ${MAX_FAILED_JOBS_IN_A_ROW_BEFORE_CONNECTION_DISABLE:30} max-fields-per-connection: ${MAX_FIELDS_PER_CONNECTION:20000} temporal: queues: @@ -320,7 +335,7 @@ airbyte: airbyte-realm: ${KEYCLOAK_AIRBYTE_REALM:airbyte} web-client-id: ${KEYCLOAK_WEB_CLIENT_ID:airbyte-webapp} client-realm: ${KEYCLOAK_CLIENT_REALM:airbyte} - client-id: ${KEYCLOAK_CLIENT_ID:admin-cli} + client-id: ${KEYCLOAK_ADMIN_CLIENT_ID:admin-cli} realm: ${KEYCLOAK_REALM:master} internal-realm: ${KEYCLOAK_INTERNAL_REALM:_airbyte-internal} username: ${KEYCLOAK_ADMIN_USER:airbyteAdmin} @@ -342,6 +357,11 @@ airbyte: name: ${AIRBYTE_API_AUTH_HEADER_NAME:} value: ${AIRBYTE_API_AUTH_HEADER_VALUE:} + # Enterprise feature + audit: + logging: + enabled: ${AUDIT_LOGGING_ENABLED:false} + temporal: cloud: client: diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/ActorDefinitionVersionApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/ActorDefinitionVersionApiTest.java deleted file mode 100644 index 8e5f455a320..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/ActorDefinitionVersionApiTest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.ActorDefinitionVersionRead; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -/** - * Test class for {@link ActorDefinitionVersionApiController}. 
- */ -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class ActorDefinitionVersionApiTest extends BaseControllerTest { - - @Test - void testGetActorDefinitionForSource() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { - Mockito.when(actorDefinitionVersionHandler.getActorDefinitionVersionForSourceId(Mockito.any())) - .thenReturn(new ActorDefinitionVersionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/actor_definition_versions/get_for_source"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetActorDefinitionForDestination() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { - Mockito.when(actorDefinitionVersionHandler.getActorDefinitionVersionForDestinationId(Mockito.any())) - .thenReturn(new ActorDefinitionVersionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/actor_definition_versions/get_for_destination"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.NOT_FOUND); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/AttemptApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/AttemptApiTest.java deleted file mode 100644 index 65489dd1f4c..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/AttemptApiTest.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.InternalOperationResult; -import io.airbyte.api.model.generated.SaveStatsRequestBody; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class AttemptApiTest extends BaseControllerTest { - - @Test - void testSaveState() { - Mockito.when(attemptHandler.saveStats(Mockito.any())) - .thenReturn(new InternalOperationResult()); - final String path = "/api/v1/attempt/save_stats"; - testEndpointStatus( - HttpRequest.POST(path, new SaveStatsRequestBody()), - HttpStatus.OK); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/BaseControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/BaseControllerTest.java deleted file mode 100644 index 3b2218b0f46..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/BaseControllerTest.java +++ /dev/null @@ -1,367 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -import io.airbyte.commons.server.handlers.ActorDefinitionVersionHandler; -import io.airbyte.commons.server.handlers.AttemptHandler; -import io.airbyte.commons.server.handlers.ConnectionsHandler; -import io.airbyte.commons.server.handlers.ConnectorDefinitionSpecificationHandler; -import io.airbyte.commons.server.handlers.DeploymentMetadataHandler; -import io.airbyte.commons.server.handlers.DestinationDefinitionsHandler; -import io.airbyte.commons.server.handlers.DestinationHandler; -import io.airbyte.commons.server.handlers.DiagnosticToolHandler; -import io.airbyte.commons.server.handlers.EnterpriseSourceStubsHandler; -import io.airbyte.commons.server.handlers.HealthCheckHandler; -import io.airbyte.commons.server.handlers.JobHistoryHandler; -import io.airbyte.commons.server.handlers.MatchSearchHandler; -import io.airbyte.commons.server.handlers.NotificationsHandler; -import io.airbyte.commons.server.handlers.OAuthHandler; -import io.airbyte.commons.server.handlers.OpenApiConfigHandler; -import io.airbyte.commons.server.handlers.OperationsHandler; -import io.airbyte.commons.server.handlers.OrganizationsHandler; -import io.airbyte.commons.server.handlers.PermissionHandler; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.airbyte.commons.server.handlers.SourceDefinitionsHandler; -import io.airbyte.commons.server.handlers.SourceHandler; -import io.airbyte.commons.server.handlers.StateHandler; -import io.airbyte.commons.server.handlers.UserHandler; -import io.airbyte.commons.server.handlers.WebBackendCheckUpdatesHandler; -import io.airbyte.commons.server.handlers.WebBackendConnectionsHandler; -import io.airbyte.commons.server.handlers.WebBackendGeographiesHandler; -import io.airbyte.commons.server.handlers.WorkspacesHandler; -import io.airbyte.commons.server.scheduler.SynchronousSchedulerClient; -import io.airbyte.commons.server.support.CurrentUserService; -import io.airbyte.commons.server.validation.ActorDefinitionAccessValidator; -import io.airbyte.persistence.job.JobNotifier; -import io.airbyte.persistence.job.tracker.JobTracker; -import io.micronaut.context.annotation.Replaces; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.http.client.HttpClient; -import io.micronaut.http.client.annotation.Client; -import io.micronaut.http.client.exceptions.HttpClientResponseException; -import io.micronaut.runtime.server.EmbeddedServer; -import io.micronaut.security.utils.SecurityService; -import io.micronaut.test.annotation.MockBean; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import io.temporal.client.WorkflowClient; -import io.temporal.serviceclient.WorkflowServiceStubs; -import jakarta.inject.Inject; -import org.assertj.core.api.Assertions; -import org.assertj.core.api.Condition; -import org.assertj.core.api.InstanceOfAssertFactory; -import org.mockito.Mockito; - -/** - * This is the base class for the test of the controllers. This allows to test that: - *

- * <ul>
- * <li>The path defined at the moment of writing the test exists,</li>
- * <li>The return code is the expected one. It could have help to catch that during the migration to
- * micronaut, some endpoint return value switch from a 204 NO_CONTENT return code to a 200 OK which
- * was a regression,</li>
- * <li>It allow to test that the exception thrown by the handler are properly catch by the exception
- * handlers and then return an expected HTTP return code,</li>
- * <li>It could help with testing the authorization by injecting a user and workspace in the header
- * and check that the authorization is properly applied.</li>
- * </ul>
- */ -@MicronautTest -@SuppressWarnings("PMD.AbstractClassWithoutAbstractMethod") -abstract class BaseControllerTest { - - EnterpriseSourceStubsHandler enterpriseSourceStubsHandler = Mockito.mock(EnterpriseSourceStubsHandler.class); - - @MockBean(EnterpriseSourceStubsHandler.class) - @Replaces(EnterpriseSourceStubsHandler.class) - EnterpriseSourceStubsHandler mmEnterpriseSourceStubsHandler() { - return enterpriseSourceStubsHandler; - } - - ActorDefinitionVersionHandler actorDefinitionVersionHandler = Mockito.mock(ActorDefinitionVersionHandler.class); - - @MockBean(ActorDefinitionVersionHandler.class) - @Replaces(ActorDefinitionVersionHandler.class) - ActorDefinitionVersionHandler mmActorDefinitionVersionHandler() { - return actorDefinitionVersionHandler; - } - - AttemptHandler attemptHandler = Mockito.mock(AttemptHandler.class); - - @MockBean(AttemptHandler.class) - @Replaces(AttemptHandler.class) - AttemptHandler mmAttemptHandler() { - return attemptHandler; - } - - ConnectionsHandler connectionsHandler = Mockito.mock(ConnectionsHandler.class); - - @MockBean(ConnectionsHandler.class) - @Replaces(ConnectionsHandler.class) - ConnectionsHandler mmConnectionsHandler() { - return connectionsHandler; - } - - MatchSearchHandler matchSearchHandler = Mockito.mock(MatchSearchHandler.class); - - @MockBean(MatchSearchHandler.class) - @Replaces(MatchSearchHandler.class) - MatchSearchHandler mmMatchSearchHandler() { - return matchSearchHandler; - } - - UserHandler userHandler = Mockito.mock(UserHandler.class); - - @MockBean(UserHandler.class) - @Replaces(UserHandler.class) - UserHandler mmUserHandler() { - return userHandler; - } - - PermissionHandler permissionHandler = Mockito.mock(PermissionHandler.class); - - @MockBean(PermissionHandler.class) - @Replaces(PermissionHandler.class) - PermissionHandler mmPermissionHandler() { - return permissionHandler; - } - - DestinationHandler destinationHandler = Mockito.mock(DestinationHandler.class); - - @MockBean(DestinationHandler.class) - @Replaces(DestinationHandler.class) - DestinationHandler mmDestinationHandler() { - return destinationHandler; - } - - DestinationDefinitionsHandler destinationDefinitionsHandler = Mockito.mock(DestinationDefinitionsHandler.class); - - @MockBean(DestinationDefinitionsHandler.class) - @Replaces(DestinationDefinitionsHandler.class) - DestinationDefinitionsHandler mmDestinationDefinitionsHandler() { - return destinationDefinitionsHandler; - } - - HealthCheckHandler healthCheckHandler = Mockito.mock(HealthCheckHandler.class); - - @MockBean(HealthCheckHandler.class) - @Replaces(HealthCheckHandler.class) - HealthCheckHandler mmHealthCheckHandler() { - return healthCheckHandler; - } - - JobHistoryHandler jobHistoryHandler = Mockito.mock(JobHistoryHandler.class); - - @MockBean(JobHistoryHandler.class) - @Replaces(JobHistoryHandler.class) - JobHistoryHandler mmJobHistoryHandler() { - return jobHistoryHandler; - } - - NotificationsHandler notificationsHandler = Mockito.mock(NotificationsHandler.class); - - @MockBean(NotificationsHandler.class) - @Replaces(NotificationsHandler.class) - NotificationsHandler mmNotificationsHandler() { - return notificationsHandler; - } - - OAuthHandler oAuthHandler = Mockito.mock(OAuthHandler.class); - - @MockBean(OAuthHandler.class) - @Replaces(OAuthHandler.class) - OAuthHandler mmOAuthHandler() { - return oAuthHandler; - } - - OpenApiConfigHandler openApiConfigHandler = Mockito.mock(OpenApiConfigHandler.class); - - @MockBean(OpenApiConfigHandler.class) - @Replaces(OpenApiConfigHandler.class) - 
OpenApiConfigHandler mmOpenApiConfigHandler() { - return openApiConfigHandler; - } - - OperationsHandler operationsHandler = Mockito.mock(OperationsHandler.class); - - @MockBean(OperationsHandler.class) - @Replaces(OperationsHandler.class) - OperationsHandler mmOperationsHandler() { - return operationsHandler; - } - - SchedulerHandler schedulerHandler = Mockito.mock(SchedulerHandler.class); - - @MockBean(SchedulerHandler.class) - @Replaces(SchedulerHandler.class) - SchedulerHandler mmSchedulerHandler() { - return schedulerHandler; - } - - ConnectorDefinitionSpecificationHandler connectorDefinitionSpecificationHandler = Mockito.mock(ConnectorDefinitionSpecificationHandler.class); - - @MockBean(ConnectorDefinitionSpecificationHandler.class) - @Replaces(ConnectorDefinitionSpecificationHandler.class) - - ConnectorDefinitionSpecificationHandler mmConnectorDefinitionSpecificationHandler() { - return connectorDefinitionSpecificationHandler; - } - - SourceDefinitionsHandler sourceDefinitionsHandler = Mockito.mock(SourceDefinitionsHandler.class); - - @MockBean(SourceDefinitionsHandler.class) - @Replaces(SourceDefinitionsHandler.class) - SourceDefinitionsHandler mmSourceDefinitionsHandler() { - return sourceDefinitionsHandler; - } - - ActorDefinitionAccessValidator actorDefinitionAccessValidator = Mockito.mock(ActorDefinitionAccessValidator.class); - - @MockBean(ActorDefinitionAccessValidator.class) - @Replaces(ActorDefinitionAccessValidator.class) - ActorDefinitionAccessValidator mmActorDefinitionAccessValidator() { - return actorDefinitionAccessValidator; - } - - SourceHandler sourceHandler = Mockito.mock(SourceHandler.class); - - @MockBean(SourceHandler.class) - @Replaces(SourceHandler.class) - SourceHandler mmSourceHandler() { - return sourceHandler; - } - - StateHandler stateHandler = Mockito.mock(StateHandler.class); - - @MockBean(StateHandler.class) - @Replaces(StateHandler.class) - StateHandler mmStateHandler() { - return stateHandler; - } - - WebBackendConnectionsHandler webBackendConnectionsHandler = Mockito.mock(WebBackendConnectionsHandler.class); - - @MockBean(WebBackendConnectionsHandler.class) - @Replaces(WebBackendConnectionsHandler.class) - WebBackendConnectionsHandler mmWebBackendConnectionsHandler() { - return webBackendConnectionsHandler; - } - - WebBackendGeographiesHandler webBackendGeographiesHandler = Mockito.mock(WebBackendGeographiesHandler.class); - - @MockBean(WebBackendGeographiesHandler.class) - @Replaces(WebBackendGeographiesHandler.class) - WebBackendGeographiesHandler mmWebBackendGeographiesHandler() { - return webBackendGeographiesHandler; - } - - WebBackendCheckUpdatesHandler webBackendCheckUpdatesHandler = Mockito.mock(WebBackendCheckUpdatesHandler.class); - - @MockBean(WebBackendCheckUpdatesHandler.class) - @Replaces(WebBackendCheckUpdatesHandler.class) - WebBackendCheckUpdatesHandler mmWebBackendCheckUpdatesHandler() { - return webBackendCheckUpdatesHandler; - } - - WorkspacesHandler workspacesHandler = Mockito.mock(WorkspacesHandler.class); - - @MockBean(WorkspacesHandler.class) - @Replaces(WorkspacesHandler.class) - WorkspacesHandler mmWorkspacesHandler() { - return workspacesHandler; - } - - OrganizationsHandler organizationsHandler = Mockito.mock(OrganizationsHandler.class); - - @MockBean(OrganizationsHandler.class) - @Replaces(OrganizationsHandler.class) - OrganizationsHandler mmOrganizationsHandler() { - return organizationsHandler; - } - - DeploymentMetadataHandler deploymentMetadataHandler = Mockito.mock(DeploymentMetadataHandler.class); - - 
@MockBean(DeploymentMetadataHandler.class) - @Replaces(DeploymentMetadataHandler.class) - DeploymentMetadataHandler mmDeploymentMetadataHandler() { - return deploymentMetadataHandler; - } - - DiagnosticToolHandler diagnosticToolHandler = Mockito.mock(DiagnosticToolHandler.class); - - @MockBean(DiagnosticToolHandler.class) - @Replaces(DiagnosticToolHandler.class) - DiagnosticToolHandler mmDiagnosticToolHandler() { - return diagnosticToolHandler; - } - - @MockBean(SynchronousSchedulerClient.class) - @Replaces(SynchronousSchedulerClient.class) - SynchronousSchedulerClient mmSynchronousSchedulerClient() { - return Mockito.mock(SynchronousSchedulerClient.class); - } - - @MockBean(WorkflowClient.class) - @Replaces(WorkflowClient.class) - WorkflowClient mmWorkflowClient() { - return Mockito.mock(WorkflowClient.class); - } - - @MockBean(WorkflowServiceStubs.class) - @Replaces(WorkflowServiceStubs.class) - WorkflowServiceStubs mmWorkflowServiceStubs() { - return Mockito.mock(WorkflowServiceStubs.class); - } - - @MockBean(SecurityService.class) - @Replaces(SecurityService.class) - SecurityService mmSecurityService() { - return Mockito.mock(SecurityService.class); - } - - CurrentUserService currentUserService = Mockito.mock(CurrentUserService.class); - - @MockBean(CurrentUserService.class) - @Replaces(CurrentUserService.class) - CurrentUserService mmCurrentUserService() { - return currentUserService; - } - - @MockBean(JobNotifier.class) - @Replaces(JobNotifier.class) - JobNotifier mmJobNotifier() { - return Mockito.mock(JobNotifier.class); - } - - @MockBean(JobTracker.class) - @Replaces(JobTracker.class) - JobTracker mmJobTracker() { - return Mockito.mock(JobTracker.class); - } - - @Inject - HealthApiController healthApiController; - - @Inject - EmbeddedServer embeddedServer; - - @Inject - @Client("/") - HttpClient client; - - void testEndpointStatus(final HttpRequest request, final HttpStatus expectedStatus) { - assertEquals(expectedStatus, client.toBlocking().exchange(request).getStatus()); - } - - void testErrorEndpointStatus(final HttpRequest request, final HttpStatus expectedStatus) { - Assertions.assertThatThrownBy(() -> client.toBlocking().exchange(request)) - .isInstanceOf(HttpClientResponseException.class) - .asInstanceOf(new InstanceOfAssertFactory(HttpClientResponseException.class, Assertions::assertThat)) - .has(new Condition(exception -> exception.getStatus() == expectedStatus, - "Http status to be %s", expectedStatus)); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiControllerTest.java deleted file mode 100644 index 794cd5ca616..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiControllerTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.ConnectionStream; -import io.airbyte.api.model.generated.ConnectionStreamRequestBody; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.commons.server.handlers.SchedulerHandler; -import io.airbyte.commons.temporal.TemporalJobType; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.micronaut.context.env.Environment; -import io.micronaut.test.annotation.MockBean; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import io.temporal.client.WorkflowClient; -import jakarta.inject.Inject; -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.UUID; -import java.util.stream.Stream; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.provider.Arguments; - -/** - * Micronaut-based test suite for the {@link ConnectionApiController} class. - */ -@MicronautTest(environments = Environment.TEST) -class ConnectionApiControllerTest { - - @Inject - ConnectionApiController connectionApiController; - - @Inject - SchedulerHandler schedulerHandler; - - @Test - void testConnectionStreamReset() throws IOException, ConfigNotFoundException { - final UUID connectionId = UUID.randomUUID(); - final String streamName = "tableA"; - final String streamNamespace = "schemaA"; - final ConnectionStream connectionStream = new ConnectionStream() - .streamName(streamName) - .streamNamespace(streamNamespace); - final ConnectionStreamRequestBody connectionStreamRequestBody = new ConnectionStreamRequestBody() - .connectionId(connectionId) - .streams(List.of(connectionStream)); - final JobInfoRead expectedJobInfoRead = new JobInfoRead(); - - when(schedulerHandler.resetConnectionStream(connectionStreamRequestBody)).thenReturn(expectedJobInfoRead); - - final JobInfoRead jobInfoRead = connectionApiController.resetConnectionStream(connectionStreamRequestBody); - Assertions.assertEquals(expectedJobInfoRead, jobInfoRead); - } - - static Stream uuidJobTypesMatrix() { - return Arrays.stream(TemporalJobType.values()).map(v -> Arguments.of(UUID.randomUUID(), v)); - } - - @MockBean(SchedulerHandler.class) - SchedulerHandler schedulerHandler() { - return mock(SchedulerHandler.class); - } - - @MockBean(WorkflowClient.class) - WorkflowClient workflowClient() { - return mock(WorkflowClient.class); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiTest.java deleted file mode 100644 index e45b33a3ad9..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/ConnectionApiTest.java +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.ConnectionCreate; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionRead; -import io.airbyte.api.model.generated.ConnectionReadList; -import io.airbyte.api.model.generated.ConnectionSearch; -import io.airbyte.api.model.generated.ConnectionUpdate; -import io.airbyte.api.model.generated.InternalOperationResult; -import io.airbyte.api.model.generated.JobInfoRead; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import jakarta.validation.ConstraintViolationException; -import java.io.IOException; -import java.util.HashSet; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class ConnectionApiTest extends BaseControllerTest { - - @Test - void testAutoDisableConnection() throws JsonValidationException, IOException, ConfigNotFoundException { - Mockito.when(connectionsHandler.autoDisableConnection(Mockito.any())) - .thenReturn(new InternalOperationResult()) - .thenThrow(new ConstraintViolationException(new HashSet<>())) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/connections/auto_disable"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionUpdate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionUpdate()), - HttpStatus.BAD_REQUEST); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionUpdate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testCreateConnection() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.when(connectionsHandler.createConnection(Mockito.any())) - .thenReturn(new ConnectionRead()) - .thenThrow(new ConstraintViolationException(new HashSet<>())); - final String path = "/api/v1/connections/create"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionCreate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionCreate()), - HttpStatus.BAD_REQUEST); - } - - @Test - void testUpdateConnection() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.when(connectionsHandler.updateConnection(Mockito.any(), Mockito.any(), Mockito.any())) - .thenReturn(new ConnectionRead()) - .thenThrow(new ConstraintViolationException(new HashSet<>())) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/connections/update"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionUpdate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionUpdate()), - HttpStatus.BAD_REQUEST); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionUpdate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testListConnectionsForWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(connectionsHandler.listConnectionsForWorkspace(Mockito.any())) - .thenReturn(new 
ConnectionReadList()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/connections/list"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testListAllConnectionsForWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(connectionsHandler.listAllConnectionsForWorkspace(Mockito.any())) - .thenReturn(new ConnectionReadList()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/connections/list_all"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testSearchConnections() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { - Mockito.when(matchSearchHandler.searchConnections(Mockito.any())) - .thenReturn(new ConnectionReadList()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/connections/search"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionSearch()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionSearch()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(connectionsHandler.getConnection(Mockito.any())) - .thenReturn(new ConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/connections/get"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testDeleteConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.doNothing() - .doThrow(new ConfigNotFoundException("", "")) - .when(connectionsHandler).deleteConnection(Mockito.any()); - - final String path = "/api/v1/connections/delete"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.NO_CONTENT); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testSyncConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(schedulerHandler.syncConnection(Mockito.any())) - .thenReturn(new JobInfoRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/connections/sync"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testResetConnection() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(schedulerHandler.resetConnection(Mockito.any())) - .thenReturn(new JobInfoRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/connections/reset"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - -} diff --git 
a/airbyte-server/src/test/java/io/airbyte/server/apis/DeploymentMetadataApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/DeploymentMetadataApiControllerTest.java deleted file mode 100644 index de91d459c15..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/DeploymentMetadataApiControllerTest.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.DeploymentMetadataRead; -import io.airbyte.config.Configs; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.util.UUID; -import org.junit.jupiter.api.Test; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class DeploymentMetadataApiControllerTest extends BaseControllerTest { - - @Test - void testFetchDeploymentMetadata() { - final DeploymentMetadataRead deploymentMetadataRead = new DeploymentMetadataRead() - .id(UUID.randomUUID()) - .mode(Configs.DeploymentMode.OSS.name()) - .version("0.2.3"); - when(deploymentMetadataHandler.getDeploymentMetadata()).thenReturn(deploymentMetadataRead); - final String path = "/api/v1/deployment/metadata"; - testEndpointStatus( - HttpRequest.POST(path, null), - HttpStatus.OK); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationApiTest.java deleted file mode 100644 index 530ae120b00..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationApiTest.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.DestinationCloneRequestBody; -import io.airbyte.api.model.generated.DestinationCreate; -import io.airbyte.api.model.generated.DestinationIdRequestBody; -import io.airbyte.api.model.generated.DestinationRead; -import io.airbyte.api.model.generated.DestinationReadList; -import io.airbyte.api.model.generated.DestinationSearch; -import io.airbyte.api.model.generated.DestinationUpdate; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import jakarta.validation.ConstraintViolationException; -import java.io.IOException; -import java.util.HashSet; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class DestinationApiTest extends BaseControllerTest { - - @Test - void testCheckConnectionToDestination() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(schedulerHandler.checkDestinationConnectionFromDestinationId(Mockito.any())) - .thenReturn(new CheckConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destinations/check_connection"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testCheckConnectionToDestinationForUpdate() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.when(schedulerHandler.checkDestinationConnectionFromDestinationIdForUpdate(Mockito.any())) - .thenReturn(new CheckConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destinations/check_connection_for_update"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationUpdate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationUpdate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testCloneDestination() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(destinationHandler.cloneDestination(Mockito.any())) - .thenReturn(new DestinationRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destinations/clone"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationCloneRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationCloneRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testCreateDestination() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(destinationHandler.createDestination(Mockito.any())) - .thenReturn(new DestinationRead()) - .thenThrow(new ConstraintViolationException(new HashSet<>())); - final String path = "/api/v1/destinations/create"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationCreate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new 
DestinationCreate()), - HttpStatus.BAD_REQUEST); - } - - @Test - void testDeleteDestination() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.doNothing() - .doThrow(new ConfigNotFoundException("", "")) - .when(destinationHandler).deleteDestination(Mockito.any(DestinationIdRequestBody.class)); - - final String path = "/api/v1/destinations/delete"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.NO_CONTENT); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetDestination() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(destinationHandler.getDestination(Mockito.any())) - .thenReturn(new DestinationRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destinations/get"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testListDestination() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(destinationHandler.listDestinationsForWorkspace(Mockito.any())) - .thenReturn(new DestinationReadList()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destinations/list"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testSearchDestination() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(destinationHandler.searchDestinations(Mockito.any())) - .thenReturn(new DestinationReadList()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destinations/search"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationSearch()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationSearch()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpdateDestination() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.when(destinationHandler.updateDestination(Mockito.any())) - .thenReturn(new DestinationRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destinations/update"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationUpdate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationUpdate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpgradeDestinationVersion() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.doNothing() - .doThrow(new ConfigNotFoundException("", "")) - .when(destinationHandler).upgradeDestinationVersion(Mockito.any()); - final String path = "/api/v1/destinations/upgrade_version"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.NO_CONTENT); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationIdRequestBody()), - HttpStatus.NOT_FOUND); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationDefinitionApiTest.java 
b/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationDefinitionApiTest.java deleted file mode 100644 index 4326a2e3493..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationDefinitionApiTest.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static org.mockito.Mockito.doThrow; - -import io.airbyte.api.model.generated.ActorDefinitionIdWithScope; -import io.airbyte.api.model.generated.CustomDestinationDefinitionCreate; -import io.airbyte.api.model.generated.DestinationDefinitionIdRequestBody; -import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionRead; -import io.airbyte.api.model.generated.DestinationDefinitionReadList; -import io.airbyte.api.model.generated.DestinationDefinitionUpdate; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionRead; -import io.airbyte.api.model.generated.PrivateDestinationDefinitionReadList; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.server.errors.ApplicationErrorKnownException; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import java.util.UUID; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class DestinationDefinitionApiTest extends BaseControllerTest { - - @Test - void testCheckConnectionToDestination() throws IOException { - Mockito.when(destinationDefinitionsHandler.createCustomDestinationDefinition(Mockito.any())) - .thenReturn(new DestinationDefinitionRead()); - final String path = "/api/v1/destination_definitions/create_custom"; - testEndpointStatus( - HttpRequest.POST(path, new CustomDestinationDefinitionCreate()), - HttpStatus.OK); - } - - @Test - void testDeleteDestinationDefinition() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.doNothing() - .doThrow(new io.airbyte.config.persistence.ConfigNotFoundException("", "")) - .when(destinationDefinitionsHandler).deleteDestinationDefinition(Mockito.any()); - final String path = "/api/v1/destination_definitions/delete"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionIdRequestBody()), - HttpStatus.NO_CONTENT); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testDeleteDestinationDefinitionNoWriteAccess() { - final UUID destinationDefinitionId = UUID.randomUUID(); - doThrow(new ApplicationErrorKnownException("invalid")).when(actorDefinitionAccessValidator).validateWriteAccess(destinationDefinitionId); - - final String path = "/api/v1/destination_definitions/delete"; - testErrorEndpointStatus( - HttpRequest.POST(path, - new DestinationDefinitionIdRequestBody().destinationDefinitionId(destinationDefinitionId)), - HttpStatus.UNPROCESSABLE_ENTITY); - } - - @Test - void testGetDestinationDefinition() - throws JsonValidationException, ConfigNotFoundException, 
IOException { - Mockito.when(destinationDefinitionsHandler.getDestinationDefinition(Mockito.any())) - .thenReturn(new DestinationDefinitionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destination_definitions/get"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetDestinationDefinitionForWorkspace() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(destinationDefinitionsHandler.getDestinationDefinitionForWorkspace(Mockito.any())) - .thenReturn(new DestinationDefinitionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destination_definitions/get_for_workspace"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionIdWithWorkspaceId()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionIdWithWorkspaceId()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGrantDestinationDefinitionToWorkspace() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(destinationDefinitionsHandler.grantDestinationDefinitionToWorkspaceOrOrganization(Mockito.any())) - .thenReturn(new PrivateDestinationDefinitionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destination_definitions/grant_definition"; - testEndpointStatus( - HttpRequest.POST(path, new ActorDefinitionIdWithScope()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ActorDefinitionIdWithScope()), - HttpStatus.NOT_FOUND); - } - - @Test - void testListDestinationDefinitions() throws JsonValidationException, IOException { - Mockito.when(destinationDefinitionsHandler.listDestinationDefinitions()) - .thenReturn(new DestinationDefinitionReadList()); - final String path = "/api/v1/destination_definitions/list"; - testEndpointStatus( - HttpRequest.POST(path, ""), - HttpStatus.OK); - } - - @Test - void testListDestinationDefinitionsForWorkspace() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(destinationDefinitionsHandler.listDestinationDefinitionsForWorkspace(Mockito.any())) - .thenReturn(new DestinationDefinitionReadList()); - final String path = "/api/v1/destination_definitions/list_for_workspace"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testListLatestDestinationDefinitions() { - Mockito.when(destinationDefinitionsHandler.listLatestDestinationDefinitions()) - .thenReturn(new DestinationDefinitionReadList()); - final String path = "/api/v1/destination_definitions/list_latest"; - testEndpointStatus( - HttpRequest.POST(path, ""), - HttpStatus.OK); - } - - @Test - void testListPrivateDestinationDefinitions() throws IOException { - Mockito.when(destinationDefinitionsHandler.listPrivateDestinationDefinitions(Mockito.any())) - .thenReturn(new PrivateDestinationDefinitionReadList()); - final String path = "/api/v1/destination_definitions/list_private"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testRevokeDestinationDefinitionFromWorkspace() throws IOException { - Mockito.doNothing() - .when(destinationDefinitionsHandler).revokeDestinationDefinition(Mockito.any()); - final String path = 
"/api/v1/destination_definitions/revoke_definition"; - testEndpointStatus( - HttpRequest.POST(path, new ActorDefinitionIdWithScope()), - HttpStatus.OK); - } - - @Test - void testUpdateDestinationDefinition() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(destinationDefinitionsHandler.updateDestinationDefinition(Mockito.any())) - .thenReturn(new DestinationDefinitionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destination_definitions/update"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionUpdate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionUpdate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpdateDestinationDefinitionNoWriteAccess() { - final UUID destinationDefinitionId = UUID.randomUUID(); - doThrow(new ApplicationErrorKnownException("invalid")).when(actorDefinitionAccessValidator).validateWriteAccess(destinationDefinitionId); - - final String path = "/api/v1/destination_definitions/update"; - testErrorEndpointStatus( - HttpRequest.POST(path, - new DestinationDefinitionUpdate().destinationDefinitionId(destinationDefinitionId)), - HttpStatus.UNPROCESSABLE_ENTITY); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiTest.java deleted file mode 100644 index 2019d3cd2f7..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationDefinitionSpecificationApiTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class DestinationDefinitionSpecificationApiTest extends BaseControllerTest { - - @Test - void testCheckConnectionToDestination() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(connectorDefinitionSpecificationHandler.getDestinationSpecification(Mockito.any())) - .thenReturn(new DestinationDefinitionSpecificationRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destination_definition_specifications/get"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionIdWithWorkspaceId()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationDefinitionIdWithWorkspaceId()), - HttpStatus.NOT_FOUND); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationOauthApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationOauthApiTest.java deleted file mode 100644 index a724b8bad10..00000000000 --- 
a/airbyte-server/src/test/java/io/airbyte/server/apis/DestinationOauthApiTest.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.CompleteDestinationOAuthRequest; -import io.airbyte.api.model.generated.CompleteOAuthResponse; -import io.airbyte.api.model.generated.DestinationOauthConsentRequest; -import io.airbyte.api.model.generated.OAuthConsentRead; -import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class DestinationOauthApiTest extends BaseControllerTest { - - @Test - void testCompleteDestinationOAuth() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(oAuthHandler.completeDestinationOAuth(Mockito.any())) - .thenReturn(new CompleteOAuthResponse()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destination_oauths/complete_oauth"; - testEndpointStatus( - HttpRequest.POST(path, new CompleteDestinationOAuthRequest()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new CompleteDestinationOAuthRequest()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetDestinationOAuthConsent() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(oAuthHandler.getDestinationOAuthConsent(Mockito.any())) - .thenReturn(new OAuthConsentRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/destination_oauths/get_consent_url"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationOauthConsentRequest()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationOauthConsentRequest()), - HttpStatus.NOT_FOUND); - } - - @Test - void testDeleteDestination() throws IOException { - Mockito.doNothing() - .when(oAuthHandler).setDestinationInstancewideOauthParams(Mockito.any()); - - final String path = "/api/v1/destination_oauths/oauth_params/create"; - testEndpointStatus( - HttpRequest.POST(path, new SetInstancewideDestinationOauthParamsRequestBody()), - HttpStatus.OK); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/DiagnosticToolApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/DiagnosticToolApiControllerTest.java deleted file mode 100644 index c657292e75f..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/DiagnosticToolApiControllerTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */
-
-package io.airbyte.server.apis;
-
-import io.airbyte.api.model.generated.DiagnosticReportRequestBody;
-import io.fabric8.kubernetes.client.KubernetesClient;
-import io.micronaut.context.annotation.Requires;
-import io.micronaut.context.env.Environment;
-import io.micronaut.http.HttpRequest;
-import io.micronaut.http.HttpStatus;
-import io.micronaut.test.annotation.MockBean;
-import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
-import java.io.File;
-import java.io.IOException;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-
-/**
- * Micronaut-based test suite for the {@link ConnectionApiController} class.
- */
-@MicronautTest
-@Requires(env = {Environment.KUBERNETES, Environment.TEST})
-@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert")
-class DiagnosticToolApiControllerTest extends BaseControllerTest {
-
-  @MockBean(KubernetesClient.class)
-  KubernetesClient kubernetesClient() {
-    return Mockito.mock(KubernetesClient.class);
-  }
-
-  @Test
-  void testGenerateDiagnosticReport() throws IOException {
-    final File result = File.createTempFile("test-diagnostic", "");
-    result.deleteOnExit();
-    Mockito.when(diagnosticToolHandler.generateDiagnosticReport()).thenReturn(result);
-    final String path = "/api/v1/diagnostic_tool/generate_report";
-    testEndpointStatus(
-        HttpRequest.POST(path, new DiagnosticReportRequestBody()),
-        HttpStatus.OK);
-  }
-
-}
diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/HealthCheckApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/HealthCheckApiTest.java
deleted file mode 100644
index 31ddf8054b0..00000000000
--- a/airbyte-server/src/test/java/io/airbyte/server/apis/HealthCheckApiTest.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.server.apis;
-
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-import io.airbyte.api.model.generated.HealthCheckRead;
-import io.airbyte.commons.server.handlers.HealthCheckHandler;
-import org.junit.jupiter.api.Test;
-
-class HealthCheckApiTest {
-
-  @Test
-  void testImportDefinitions() {
-    final HealthCheckHandler healthCheckHandler = mock(HealthCheckHandler.class);
-    when(healthCheckHandler.health())
-        .thenReturn(new HealthCheckRead().available(
-            false));
-
-    final HealthApiController configurationApi = new HealthApiController(healthCheckHandler);
-
-    assertFalse(configurationApi.getHealthCheck().getAvailable());
-  }
-
-}
diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/InstanceConfigurationApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/InstanceConfigurationApiControllerTest.java
deleted file mode 100644
index e729ee896d3..00000000000
--- a/airbyte-server/src/test/java/io/airbyte/server/apis/InstanceConfigurationApiControllerTest.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.server.apis;
-
-import static org.mockito.Mockito.when;
-
-import io.airbyte.api.model.generated.InstanceConfigurationResponse;
-import io.airbyte.commons.server.handlers.InstanceConfigurationHandler;
-import io.airbyte.data.exceptions.ConfigNotFoundException;
-import io.airbyte.validation.json.JsonValidationException;
-import io.micronaut.context.annotation.Replaces;
-import io.micronaut.context.annotation.Requires;
-import io.micronaut.context.env.Environment;
-import io.micronaut.core.util.StringUtils;
-import io.micronaut.http.HttpRequest;
-import io.micronaut.http.HttpStatus;
-import io.micronaut.test.annotation.MockBean;
-import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
-import java.io.IOException;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-
-@MicronautTest
-@Requires(property = "mockito.test.enabled",
-          defaultValue = StringUtils.TRUE,
-          value = StringUtils.TRUE)
-@Requires(env = {Environment.TEST})
-@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert")
-class InstanceConfigurationApiControllerTest extends BaseControllerTest {
-
-  @Mock
-  InstanceConfigurationHandler instanceConfigurationHandler = Mockito.mock(InstanceConfigurationHandler.class);
-
-  @MockBean(InstanceConfigurationHandler.class)
-  @Replaces(InstanceConfigurationHandler.class)
-  InstanceConfigurationHandler mmInstanceConfigurationHandler() {
-    return instanceConfigurationHandler;
-  }
-
-  static String PATH = "/api/v1/instance_configuration";
-
-  @Test
-  void testGetInstanceConfiguration() throws IOException {
-    when(instanceConfigurationHandler.getInstanceConfiguration())
-        .thenReturn(new InstanceConfigurationResponse());
-
-    testEndpointStatus(HttpRequest.GET(PATH), HttpStatus.OK);
-  }
-
-  @Test
-  void testSetupInstanceConfiguration() throws ConfigNotFoundException, IOException, JsonValidationException {
-    when(instanceConfigurationHandler.setupInstanceConfiguration(Mockito.any()))
-        .thenReturn(new InstanceConfigurationResponse());
-
-    testEndpointStatus(HttpRequest.POST(PATH + "/setup", new InstanceConfigurationResponse()),
-        HttpStatus.OK);
-  }
-
-}
diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/JobRetryStatesApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/JobRetryStatesApiControllerTest.java
deleted file mode 100644
index e7d89e4a2c4..00000000000
--- a/airbyte-server/src/test/java/io/airbyte/server/apis/JobRetryStatesApiControllerTest.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.server.apis;
-
-import static org.mockito.Mockito.when;
-
-import io.airbyte.api.model.generated.JobIdRequestBody;
-import io.airbyte.api.model.generated.JobRetryStateRequestBody;
-import io.airbyte.api.model.generated.RetryStateRead;
-import io.airbyte.server.handlers.RetryStatesHandler;
-import io.micronaut.context.annotation.Replaces;
-import io.micronaut.context.annotation.Requires;
-import io.micronaut.context.env.Environment;
-import io.micronaut.core.util.StringUtils;
-import io.micronaut.http.HttpRequest;
-import io.micronaut.http.HttpStatus;
-import io.micronaut.test.annotation.MockBean;
-import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
-import java.util.Optional;
-import java.util.UUID;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mock;
-import org.mockito.Mockito;
-
-@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert")
-@MicronautTest
-@Requires(property = "mockito.test.enabled",
-          defaultValue = StringUtils.TRUE,
-          value = StringUtils.TRUE)
-@Requires(env = {Environment.TEST})
-class JobRetryStatesApiControllerTest extends BaseControllerTest {
-
-  @Mock
-  RetryStatesHandler handler = Mockito.mock(RetryStatesHandler.class);
-
-  @MockBean(RetryStatesHandler.class)
-  @Replaces(RetryStatesHandler.class)
-  RetryStatesHandler mmStreamStatusesHandler() {
-    return handler;
-  }
-
-  static String PATH_BASE = "/api/v1/jobs/retry_states";
-  static String PATH_GET = PATH_BASE + "/get";
-  static String PATH_PUT = PATH_BASE + "/create_or_update";
-
-  @Test
-  void getForJobFound() throws Exception {
-    when(handler.getByJobId(Mockito.any()))
-        .thenReturn(Optional.of(new RetryStateRead()));
-
-    testEndpointStatus(
-        HttpRequest.POST(
-            PATH_GET,
-            Fixtures.jobIdReq()),
-        HttpStatus.OK);
-  }
-
-  @Test
-  void getForJobNotFound() throws Exception {
-    when(handler.getByJobId(Mockito.any()))
-        .thenReturn(Optional.empty());
-
-    testErrorEndpointStatus(
-        HttpRequest.POST(
-            PATH_GET,
-            Fixtures.jobIdReq()),
-        HttpStatus.NOT_FOUND);
-  }
-
-  @Test
-  void putForJob() throws Exception {
-    testEndpointStatus(
-        HttpRequest.POST(
-            PATH_PUT,
-            Fixtures.retryPutReq()),
-        HttpStatus.NO_CONTENT);
-  }
-
-  static class Fixtures {
-
-    static long jobId1 = 21891253;
-
-    static JobIdRequestBody jobIdReq() {
-      return new JobIdRequestBody().id(jobId1);
-    }
-
-    static JobRetryStateRequestBody retryPutReq() {
-      return new JobRetryStateRequestBody()
-          .id(UUID.randomUUID())
-          .connectionId(UUID.randomUUID())
-          .jobId(jobId1)
-          .successiveCompleteFailures(8)
-          .totalCompleteFailures(12)
-          .successivePartialFailures(4)
-          .totalPartialFailures(42);
-    }
-
-  }
-
-}
diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/JobsApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/JobsApiTest.java
deleted file mode 100644
index c3d9d1f58dc..00000000000
--- a/airbyte-server/src/test/java/io/airbyte/server/apis/JobsApiTest.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.server.apis;
-
-import io.airbyte.api.model.generated.JobCreate;
-import io.airbyte.api.model.generated.JobDebugInfoRead;
-import io.airbyte.api.model.generated.JobIdRequestBody;
-import io.airbyte.api.model.generated.JobInfoRead;
-import io.airbyte.data.exceptions.ConfigNotFoundException;
-import io.airbyte.validation.json.JsonValidationException;
-import io.micronaut.context.annotation.Requires;
-import io.micronaut.context.env.Environment;
-import io.micronaut.http.HttpRequest;
-import io.micronaut.http.HttpStatus;
-import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
-import java.io.IOException;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-
-@MicronautTest
-@Requires(env = {Environment.TEST})
-@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert")
-class JobsApiTest extends BaseControllerTest {
-
-  @Test
-  void testCreateJob() throws IOException, JsonValidationException, ConfigNotFoundException {
-    Mockito.when(schedulerHandler.createJob(Mockito.any()))
-        .thenReturn(new JobInfoRead())
-        .thenThrow(new ConfigNotFoundException("", ""));
-    final String path = "/api/v1/jobs/create";
-    testEndpointStatus(
-        HttpRequest.POST(path, new JobCreate()),
-        HttpStatus.OK);
-    testErrorEndpointStatus(
-        HttpRequest.POST(path, new JobCreate()),
-        HttpStatus.NOT_FOUND);
-  }
-
-  @Test
-  void testCancelJob() throws IOException {
-    Mockito.when(schedulerHandler.cancelJob(Mockito.any()))
-        .thenReturn(new JobInfoRead());
-    final String path = "/api/v1/jobs/cancel";
-    testEndpointStatus(
-        HttpRequest.POST(path, new JobIdRequestBody()),
-        HttpStatus.OK);
-  }
-
-  @Test
-  void testGetJobDebugInfo()
-      throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.config.persistence.ConfigNotFoundException {
-    Mockito.when(jobHistoryHandler.getJobDebugInfo(Mockito.any()))
-        .thenReturn(new JobDebugInfoRead())
-        .thenThrow(new io.airbyte.config.persistence.ConfigNotFoundException("", ""));
-    final String path = "/api/v1/jobs/get_debug_info";
-    testEndpointStatus(
-        HttpRequest.POST(path, new JobIdRequestBody()),
-        HttpStatus.OK);
-    testErrorEndpointStatus(
-        HttpRequest.POST(path, new JobIdRequestBody()),
-        HttpStatus.NOT_FOUND);
-  }
-
-}
diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/MicronautHealthCheck.java b/airbyte-server/src/test/java/io/airbyte/server/apis/MicronautHealthCheck.java
deleted file mode 100644
index d187ff6db4e..00000000000
--- a/airbyte-server/src/test/java/io/airbyte/server/apis/MicronautHealthCheck.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.server.apis;
-
-import io.airbyte.api.model.generated.HealthCheckRead;
-import io.micronaut.context.annotation.Requires;
-import io.micronaut.context.env.Environment;
-import io.micronaut.http.HttpRequest;
-import io.micronaut.http.HttpStatus;
-import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-
-@MicronautTest
-@Requires(env = {Environment.TEST})
-@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert")
-class MicronautHealthCheck extends BaseControllerTest {
-
-  @Test
-  void testHealth() {
-    Mockito.when(healthCheckHandler.health())
-        .thenReturn(new HealthCheckRead());
-    testEndpointStatus(
-        HttpRequest.GET("/api/v1/health"), HttpStatus.OK);
-  }
-
-}
diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/NotificationApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/NotificationApiTest.java
deleted file mode 100644
index 448466f7fa6..00000000000
--- a/airbyte-server/src/test/java/io/airbyte/server/apis/NotificationApiTest.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */
-
-package io.airbyte.server.apis;
-
-import io.airbyte.api.model.generated.NotificationRead;
-import io.airbyte.api.model.generated.NotificationTrigger;
-import io.airbyte.api.model.generated.NotificationWebhookConfigValidationRequestBody;
-import io.airbyte.api.model.generated.SlackNotificationConfiguration;
-import io.micronaut.context.annotation.Requires;
-import io.micronaut.context.env.Environment;
-import io.micronaut.http.HttpRequest;
-import io.micronaut.http.HttpStatus;
-import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-
-@MicronautTest
-@Requires(env = {Environment.TEST})
-@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert")
-class NotificationApiTest extends BaseControllerTest {
-
-  @Test
-  void testTryWebhookApi() {
-    Mockito.when(notificationsHandler.tryNotification(Mockito.any(), Mockito.any()))
-        .thenReturn(new NotificationRead().status(NotificationRead.StatusEnum.SUCCEEDED));
-    final String path = "/api/v1/notifications/try_webhook";
-    testEndpointStatus(
-        HttpRequest.POST(path,
-            new NotificationWebhookConfigValidationRequestBody().notificationTrigger(NotificationTrigger.SYNC_SUCCESS)
-                .slackConfiguration(new SlackNotificationConfiguration().webhook("webhook"))),
-        HttpStatus.OK);
-  }
-
-}
diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/OperationApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/OperationApiTest.java
deleted file mode 100644
index fc39a1d5c77..00000000000
--- a/airbyte-server/src/test/java/io/airbyte/server/apis/OperationApiTest.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.CheckOperationRead; -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.OperationCreate; -import io.airbyte.api.model.generated.OperationIdRequestBody; -import io.airbyte.api.model.generated.OperationRead; -import io.airbyte.api.model.generated.OperationReadList; -import io.airbyte.api.model.generated.OperationUpdate; -import io.airbyte.api.model.generated.OperatorConfiguration; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class OperationApiTest extends BaseControllerTest { - - @Test - void testCheckOperation() { - Mockito.when(operationsHandler.checkOperation(Mockito.any())) - .thenReturn(new CheckOperationRead()); - final String path = "/api/v1/operations/check"; - testEndpointStatus( - HttpRequest.POST(path, new OperatorConfiguration()), - HttpStatus.OK); - } - - @Test - void testCreateOperation() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(operationsHandler.createOperation(Mockito.any())) - .thenReturn(new OperationRead()); - final String path = "/api/v1/operations/create"; - testEndpointStatus( - HttpRequest.POST(path, new OperationCreate()), - HttpStatus.OK); - } - - @Test - void testDeleteOperation() throws IOException { - Mockito.doNothing() - .when(operationsHandler).deleteOperation(Mockito.any()); - - final String path = "/api/v1/operations/delete"; - testEndpointStatus( - HttpRequest.POST(path, new OperationIdRequestBody()), - HttpStatus.NO_CONTENT); - } - - @Test - void testGetOperation() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(operationsHandler.getOperation(Mockito.any())) - .thenReturn(new OperationRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/operations/get"; - testEndpointStatus( - HttpRequest.POST(path, new OperationIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new OperationIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testListOperationsForConnection() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(operationsHandler.listOperationsForConnection(Mockito.any())) - .thenReturn(new OperationReadList()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/operations/list"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpdateOperation() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(operationsHandler.updateOperation(Mockito.any())) - .thenReturn(new OperationRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/operations/update"; - testEndpointStatus( - HttpRequest.POST(path, new OperationUpdate()), - HttpStatus.OK); - 
testErrorEndpointStatus( - HttpRequest.POST(path, new OperationUpdate()), - HttpStatus.NOT_FOUND); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/OrganizationApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/OrganizationApiTest.java deleted file mode 100644 index 513adcea0a6..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/OrganizationApiTest.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.OrganizationCreateRequestBody; -import io.airbyte.api.model.generated.OrganizationIdRequestBody; -import io.airbyte.api.model.generated.OrganizationRead; -import io.airbyte.api.model.generated.OrganizationUpdateRequestBody; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class OrganizationApiTest extends BaseControllerTest { - - @Test - void testGetOrganization() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(organizationsHandler.getOrganization(Mockito.any())) - .thenReturn(new OrganizationRead()); - final String path = "/api/v1/organizations/get"; - testEndpointStatus( - HttpRequest.POST(path, new OrganizationIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testUpdateOrganization() throws Exception { - Mockito.when(organizationsHandler.updateOrganization(Mockito.any())) - .thenReturn(new OrganizationRead()); - final String path = "/api/v1/organizations/update"; - testEndpointStatus( - HttpRequest.POST(path, new OrganizationUpdateRequestBody()), - HttpStatus.OK); - } - - @Test - void testCreateOrganization() throws Exception { - Mockito.when(organizationsHandler.createOrganization(Mockito.any())) - .thenReturn(new OrganizationRead()); - final String path = "/api/v1/organizations/create"; - testEndpointStatus( - HttpRequest.POST(path, new OrganizationCreateRequestBody()), - HttpStatus.OK); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java deleted file mode 100644 index eb4ff27ceb9..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/PermissionApiControllerTest.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.PermissionCheckRead; -import io.airbyte.api.model.generated.PermissionCheckRequest; -import io.airbyte.api.model.generated.PermissionCreate; -import io.airbyte.api.model.generated.PermissionIdRequestBody; -import io.airbyte.api.model.generated.PermissionRead; -import io.airbyte.api.model.generated.PermissionReadList; -import io.airbyte.api.model.generated.PermissionUpdate; -import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest; -import io.airbyte.api.model.generated.UserIdRequestBody; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import java.io.IOException; -import java.util.UUID; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class PermissionApiControllerTest extends BaseControllerTest { - - @Test - void testCreatePermission() throws IOException, JsonValidationException { - Mockito.when(permissionHandler.createPermission(Mockito.any())) - .thenReturn(new PermissionRead()); - final String path = "/api/v1/permissions/create"; - testEndpointStatus( - HttpRequest.POST(path, new PermissionCreate().workspaceId(UUID.randomUUID())), - HttpStatus.OK); - } - - @Test - void testGetPermission() throws ConfigNotFoundException, IOException { - Mockito.when(permissionHandler.getPermission(Mockito.any())) - .thenReturn(new PermissionRead()); - final String path = "/api/v1/permissions/get"; - testEndpointStatus( - HttpRequest.POST(path, new PermissionIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testUpdatePermission() throws ConfigNotFoundException, IOException { - final UUID userId = UUID.randomUUID(); - Mockito.when(permissionHandler.getPermission(Mockito.any())) - .thenReturn(new PermissionRead().userId(userId)); - final String path = "/api/v1/permissions/update"; - testEndpointStatus( - HttpRequest.POST(path, new PermissionUpdate().permissionId(UUID.randomUUID())), - HttpStatus.OK); - } - - @Test - void testDeletePermission() { - Mockito.doNothing().when(permissionHandler).deletePermission(Mockito.any()); - final String path = "/api/v1/permissions/delete"; - testEndpointStatus( - HttpRequest.POST(path, new PermissionIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testListPermissionByUser() throws IOException { - Mockito.when(permissionHandler.listPermissionsByUser(Mockito.any())) - .thenReturn(new PermissionReadList()); - final String path = "/api/v1/permissions/list_by_user"; - testEndpointStatus( - HttpRequest.POST(path, new UserIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testCheckPermission() throws IOException { - Mockito.when(permissionHandler.checkPermissions(Mockito.any())) - .thenReturn(new PermissionCheckRead()); - final String path = "/api/v1/permissions/check"; - testEndpointStatus( - HttpRequest.POST(path, new PermissionCheckRequest()), - HttpStatus.OK); - } - - @Test - void testCheckMultipleWorkspacesPermission() throws IOException { - Mockito.when(permissionHandler.permissionsCheckMultipleWorkspaces(Mockito.any())) - .thenReturn(new PermissionCheckRead()); - final String path = "/api/v1/permissions/check_multiple_workspaces"; - testEndpointStatus( - HttpRequest.POST(path, new PermissionsCheckMultipleWorkspacesRequest()), - HttpStatus.OK); - } - -} diff --git 
a/airbyte-server/src/test/java/io/airbyte/server/apis/SchedulerApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/SchedulerApiTest.java deleted file mode 100644 index fd7a045b38b..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/SchedulerApiTest.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.DestinationCoreConfig; -import io.airbyte.api.model.generated.SourceCoreConfig; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class SchedulerApiTest extends BaseControllerTest { - - @Test - void testExecuteDestinationCheckConnection() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(schedulerHandler.checkDestinationConnectionFromDestinationCreate(Mockito.any())) - .thenReturn(new CheckConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/scheduler/destinations/check_connection"; - testEndpointStatus( - HttpRequest.POST(path, new DestinationCoreConfig()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new DestinationCoreConfig()), - HttpStatus.NOT_FOUND); - } - - @Test - void testExecuteSourceCheckConnection() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(schedulerHandler.checkSourceConnectionFromSourceCreate(Mockito.any())) - .thenReturn(new CheckConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/scheduler/sources/check_connection"; - testEndpointStatus( - HttpRequest.POST(path, new SourceCoreConfig()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceCoreConfig()), - HttpStatus.NOT_FOUND); - } - - @Test - void testExecuteSourceDiscoverSchema() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(schedulerHandler.checkSourceConnectionFromSourceCreate(Mockito.any())) - .thenReturn(new CheckConnectionRead()); - final String path = "/api/v1/scheduler/sources/check_connection"; - testEndpointStatus( - HttpRequest.POST(path, new SourceCoreConfig()), - HttpStatus.OK); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/SourceApiTest.java deleted file mode 100644 index 0a4cea07f57..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceApiTest.java +++ /dev/null @@ -1,219 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.ActorCatalogWithUpdatedAt; -import io.airbyte.api.model.generated.CheckConnectionRead; -import io.airbyte.api.model.generated.DiscoverCatalogResult; -import io.airbyte.api.model.generated.SourceCloneRequestBody; -import io.airbyte.api.model.generated.SourceCreate; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRead; -import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody; -import io.airbyte.api.model.generated.SourceDiscoverSchemaWriteRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.SourceRead; -import io.airbyte.api.model.generated.SourceReadList; -import io.airbyte.api.model.generated.SourceSearch; -import io.airbyte.api.model.generated.SourceUpdate; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class SourceApiTest extends BaseControllerTest { - - @Test - void testCheckConnectionToSource() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(schedulerHandler.checkSourceConnectionFromSourceId(Mockito.any())) - .thenReturn(new CheckConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/check_connection"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testCheckConnectionToSourceForUpdate() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.when(schedulerHandler.checkSourceConnectionFromSourceIdForUpdate(Mockito.any())) - .thenReturn(new CheckConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/check_connection_for_update"; - testEndpointStatus( - HttpRequest.POST(path, new SourceUpdate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceUpdate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testCloneSource() throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.when(sourceHandler.cloneSource(Mockito.any())) - .thenReturn(new SourceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/clone"; - testEndpointStatus( - HttpRequest.POST(path, new SourceCloneRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceCloneRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testCreateSource() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.when(sourceHandler.createSourceWithOptionalSecret(Mockito.any())) - .thenReturn(new SourceRead()) - .thenThrow(new 
ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/create"; - testEndpointStatus( - HttpRequest.POST(path, new SourceCreate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceCreate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testDeleteSource() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.doNothing() - .doThrow(new ConfigNotFoundException("", "")) - .when(sourceHandler).deleteSource(Mockito.any(SourceIdRequestBody.class)); - - final String path = "/api/v1/sources/delete"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NO_CONTENT); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testDiscoverSchemaForSource() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.when(schedulerHandler.discoverSchemaForSourceFromSourceId(Mockito.any())) - .thenReturn(new SourceDiscoverSchemaRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/discover_schema"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDiscoverSchemaRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDiscoverSchemaRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetSource() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(sourceHandler.getSource(Mockito.any())) - .thenReturn(new SourceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/get"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetMostRecentSourceActorCatalog() throws IOException { - Mockito.when(sourceHandler.getMostRecentSourceActorCatalogWithUpdatedAt(Mockito.any())) - .thenReturn(new ActorCatalogWithUpdatedAt()); - final String path = "/api/v1/sources/most_recent_source_actor_catalog"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testListSourcesForWorkspace() - throws JsonValidationException, IOException, ConfigNotFoundException { - Mockito.when(sourceHandler.listSourcesForWorkspace(Mockito.any())) - .thenReturn(new SourceReadList()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/list"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testSearchSources() throws JsonValidationException, IOException, ConfigNotFoundException { - Mockito.when(sourceHandler.searchSources(Mockito.any())) - .thenReturn(new SourceReadList()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/search"; - testEndpointStatus( - HttpRequest.POST(path, new SourceSearch()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceSearch()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpdateSources() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - 
Mockito.when(sourceHandler.updateSource(Mockito.any())) - .thenReturn(new SourceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/sources/update"; - testEndpointStatus( - HttpRequest.POST(path, new SourceUpdate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceUpdate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testWriteDiscoverCatalogResult() throws JsonValidationException, IOException { - Mockito.when(sourceHandler.writeDiscoverCatalogResult(Mockito.any())) - .thenReturn(new DiscoverCatalogResult()); - final String path = "/api/v1/sources/write_discover_catalog_result"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDiscoverSchemaWriteRequestBody()), - HttpStatus.OK); - } - - @Test - void testUpgradeSourceVersion() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.doNothing() - .doThrow(new ConfigNotFoundException("", "")) - .when(sourceHandler).upgradeSourceVersion(Mockito.any()); - final String path = "/api/v1/sources/upgrade_version"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NO_CONTENT); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceDefinitionApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/SourceDefinitionApiTest.java deleted file mode 100644 index cc1484a7d1d..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceDefinitionApiTest.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import static org.mockito.Mockito.doThrow; - -import io.airbyte.api.model.generated.PrivateSourceDefinitionRead; -import io.airbyte.api.model.generated.PrivateSourceDefinitionReadList; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionRead; -import io.airbyte.api.model.generated.SourceDefinitionReadList; -import io.airbyte.api.model.generated.SourceDefinitionUpdate; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.commons.server.errors.ApplicationErrorKnownException; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import java.util.UUID; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class SourceDefinitionApiTest extends BaseControllerTest { - - @Test - void testCreateCustomSourceDefinition() throws IOException { - Mockito.when(sourceDefinitionsHandler.createCustomSourceDefinition(Mockito.any())) - .thenReturn(new SourceDefinitionRead()); - final String path = "/api/v1/source_definitions/create_custom"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testDeleteSourceDefinition() - throws 
JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.doNothing() - .doThrow(new io.airbyte.config.persistence.ConfigNotFoundException("", "")) - .when(sourceDefinitionsHandler).deleteSourceDefinition(Mockito.any()); - - final String path = "/api/v1/source_definitions/delete"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NO_CONTENT); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testDeleteSourceDefinitionNoWriteAccess() { - final UUID sourceDefinitionId = UUID.randomUUID(); - doThrow(new ApplicationErrorKnownException("invalid")).when(actorDefinitionAccessValidator).validateWriteAccess(sourceDefinitionId); - - final String path = "/api/v1/source_definitions/delete"; - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody().sourceDefinitionId(sourceDefinitionId)), - HttpStatus.UNPROCESSABLE_ENTITY); - } - - @Test - void testGetSourceDefinition() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(sourceDefinitionsHandler.getSourceDefinition(Mockito.any())) - .thenReturn(new SourceDefinitionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/source_definitions/get"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetSourceDefinitionForWorkspace() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(sourceDefinitionsHandler.getSourceDefinitionForWorkspace(Mockito.any())) - .thenReturn(new SourceDefinitionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/source_definitions/get_for_workspace"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdWithWorkspaceId()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdWithWorkspaceId()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGrantSourceDefinitionToWorkspace() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(sourceDefinitionsHandler.grantSourceDefinitionToWorkspaceOrOrganization(Mockito.any())) - .thenReturn(new PrivateSourceDefinitionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/source_definitions/grant_definition"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdWithWorkspaceId()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdWithWorkspaceId()), - HttpStatus.NOT_FOUND); - } - - @Test - void testListLatestSourceDefinitions() { - Mockito.when(sourceDefinitionsHandler.listLatestSourceDefinitions()) - .thenReturn(new SourceDefinitionReadList()); - final String path = "/api/v1/source_definitions/list_latest"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdWithWorkspaceId()), - HttpStatus.OK); - } - - @Test - void testListPrivateSourceDefinitions() throws IOException { - Mockito.when(sourceDefinitionsHandler.listPrivateSourceDefinitions(Mockito.any())) - .thenReturn(new PrivateSourceDefinitionReadList()); - final String path = "/api/v1/source_definitions/list_private"; - testEndpointStatus( - HttpRequest.POST(path, new 
WorkspaceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testListSourceDefinitions() throws IOException { - Mockito.when(sourceDefinitionsHandler.listSourceDefinitions()) - .thenReturn(new SourceDefinitionReadList()); - final String path = "/api/v1/source_definitions/list"; - testEndpointStatus( - HttpRequest.POST(path, ""), - HttpStatus.OK); - } - - @Test - void testListSourceDefinitionsForWorkspace() throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(sourceDefinitionsHandler.listSourceDefinitionsForWorkspace(Mockito.any())) - .thenReturn(new SourceDefinitionReadList()); - final String path = "/api/v1/source_definitions/list_for_workspace"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testRevokeSourceDefinition() throws IOException { - Mockito.doNothing().when(sourceDefinitionsHandler).revokeSourceDefinition(Mockito.any()); - - final String path = "/api/v1/source_definitions/revoke_definition"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdWithWorkspaceId()), - HttpStatus.NO_CONTENT); - } - - @Test - void testUpdateSourceDefinition() - throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(sourceDefinitionsHandler.updateSourceDefinition(Mockito.any())) - .thenReturn(new SourceDefinitionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/source_definitions/update"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionUpdate()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionUpdate()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpdateSourceDefinitionNoWriteAccess() { - final UUID sourceDefinitionId = UUID.randomUUID(); - doThrow(new ApplicationErrorKnownException("invalid")).when(actorDefinitionAccessValidator).validateWriteAccess(sourceDefinitionId); - - final String path = "/api/v1/source_definitions/update"; - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionUpdate().sourceDefinitionId(sourceDefinitionId)), - HttpStatus.UNPROCESSABLE_ENTITY); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiTest.java deleted file mode 100644 index 19494433ad8..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceDefinitionSpecificationApiTest.java +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId; -import io.airbyte.api.model.generated.SourceDefinitionSpecificationRead; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class SourceDefinitionSpecificationApiTest extends BaseControllerTest { - - @Test - void testCreateCustomSourceDefinition() - throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { - Mockito.when(connectorDefinitionSpecificationHandler.getSourceDefinitionSpecification(Mockito.any())) - .thenReturn(new SourceDefinitionSpecificationRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/source_definition_specifications/get"; - testEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdWithWorkspaceId()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdWithWorkspaceId()), - HttpStatus.NOT_FOUND); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceOauthApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/SourceOauthApiTest.java deleted file mode 100644 index 60cd86de7fa..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceOauthApiTest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.CompleteOAuthResponse; -import io.airbyte.api.model.generated.OAuthConsentRead; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class SourceOauthApiTest extends BaseControllerTest { - - @Test - void testCompleteSourceOAuth() - throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(oAuthHandler.completeSourceOAuthHandleReturnSecret(Mockito.any())) - .thenReturn(new CompleteOAuthResponse()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/source_oauths/complete_oauth"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetSourceOAuthConsent() - throws IOException, JsonValidationException, ConfigNotFoundException { - Mockito.when(oAuthHandler.getSourceOAuthConsent(Mockito.any())) - .thenReturn(new OAuthConsentRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/source_oauths/get_consent_url"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testSetInstancewideSourceOauthParams() throws IOException { - Mockito.doNothing() - .when(oAuthHandler).setSourceInstancewideOauthParams(Mockito.any()); - - final String path = "/api/v1/source_oauths/oauth_params/create"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/StateApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/StateApiTest.java deleted file mode 100644 index cd54df4cfa7..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/StateApiTest.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.ConnectionState; -import io.airbyte.api.model.generated.ConnectionStateCreateOrUpdate; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class StateApiTest extends BaseControllerTest { - - @Test - void testCreateOrUpdateState() throws IOException { - Mockito.when(stateHandler.createOrUpdateState(Mockito.any())) - .thenReturn(new ConnectionState()); - final String path = "/api/v1/state/create_or_update"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionStateCreateOrUpdate()), - HttpStatus.OK); - } - - @Test - void testGetState() throws IOException { - Mockito.when(stateHandler.getState(Mockito.any())) - .thenReturn(new ConnectionState()); - final String path = "/api/v1/state/get"; - testEndpointStatus( - HttpRequest.POST(path, new ConnectionIdRequestBody()), - HttpStatus.OK); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/StreamStatusesApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/StreamStatusesApiControllerTest.java deleted file mode 100644 index 729c395325d..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/StreamStatusesApiControllerTest.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static org.mockito.Mockito.when; - -import io.airbyte.api.model.generated.ConnectionIdRequestBody; -import io.airbyte.api.model.generated.Pagination; -import io.airbyte.api.model.generated.StreamStatusCreateRequestBody; -import io.airbyte.api.model.generated.StreamStatusIncompleteRunCause; -import io.airbyte.api.model.generated.StreamStatusJobType; -import io.airbyte.api.model.generated.StreamStatusListRequestBody; -import io.airbyte.api.model.generated.StreamStatusRead; -import io.airbyte.api.model.generated.StreamStatusReadList; -import io.airbyte.api.model.generated.StreamStatusRunState; -import io.airbyte.api.model.generated.StreamStatusUpdateRequestBody; -import io.airbyte.server.handlers.StreamStatusesHandler; -import io.micronaut.context.annotation.Replaces; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.core.util.StringUtils; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.annotation.MockBean; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.util.UUID; -import java.util.concurrent.ThreadLocalRandom; -import java.util.stream.Stream; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; -import org.mockito.Mockito; - -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -@MicronautTest -@Requires(property = "mockito.test.enabled", - defaultValue = StringUtils.TRUE, - value = StringUtils.TRUE) -@Requires(env = {Environment.TEST}) -class StreamStatusesApiControllerTest extends BaseControllerTest { - - StreamStatusesHandler handler = Mockito.mock(StreamStatusesHandler.class); - - @MockBean(StreamStatusesHandler.class) - @Replaces(StreamStatusesHandler.class) - StreamStatusesHandler mmStreamStatusesHandler() { - return handler; - } - - static String PATH_BASE = "/api/v1/stream_statuses"; - static String PATH_CREATE = PATH_BASE + "/create"; - static String PATH_UPDATE = PATH_BASE + "/update"; - static String PATH_LIST = PATH_BASE + "/list"; - static String PATH_LATEST_PER_RUN_STATE = PATH_BASE + "/latest_per_run_state"; - - @Test - void testCreateSuccessful() { - when(handler.createStreamStatus(Mockito.any())) - .thenReturn(new StreamStatusRead()); - - testEndpointStatus( - HttpRequest.POST( - PATH_CREATE, - Fixtures.validCreate()), - HttpStatus.CREATED); - } - - @ParameterizedTest - @MethodSource("invalidRunStateCauseMatrix") - void testCreateIncompleteRunCauseRunStateInvariant(final StreamStatusRunState state, final StreamStatusIncompleteRunCause incompleteCause) { - when(handler.createStreamStatus(Mockito.any())) - .thenReturn(new StreamStatusRead()); - - final var invalid = Fixtures.validCreate() - .runState(state) - .incompleteRunCause(incompleteCause); - - testErrorEndpointStatus( - HttpRequest.POST( - PATH_CREATE, - invalid), - HttpStatus.BAD_REQUEST); - } - - @Test - void testUpdateSuccessful() { - when(handler.updateStreamStatus(Mockito.any())) - .thenReturn(new StreamStatusRead()); - - testEndpointStatus( - HttpRequest.POST( - PATH_UPDATE, - Fixtures.validUpdate()), - HttpStatus.OK); - } - - @ParameterizedTest - @MethodSource("invalidRunStateCauseMatrix") - void testUpdateIncompleteRunCauseRunStateInvariant(final StreamStatusRunState state, final StreamStatusIncompleteRunCause incompleteCause) { - 
when(handler.updateStreamStatus(Mockito.any())) - .thenReturn(new StreamStatusRead()); - - final var invalid = Fixtures.validUpdate() - .runState(state) - .incompleteRunCause(incompleteCause); - - testErrorEndpointStatus( - HttpRequest.POST( - PATH_UPDATE, - invalid), - HttpStatus.BAD_REQUEST); - } - - private static Stream invalidRunStateCauseMatrix() { - return Stream.of( - Arguments.of(StreamStatusRunState.PENDING, StreamStatusIncompleteRunCause.FAILED), - Arguments.of(StreamStatusRunState.PENDING, StreamStatusIncompleteRunCause.CANCELED), - Arguments.of(StreamStatusRunState.RUNNING, StreamStatusIncompleteRunCause.FAILED), - Arguments.of(StreamStatusRunState.RUNNING, StreamStatusIncompleteRunCause.CANCELED), - Arguments.of(StreamStatusRunState.COMPLETE, StreamStatusIncompleteRunCause.FAILED), - Arguments.of(StreamStatusRunState.COMPLETE, StreamStatusIncompleteRunCause.CANCELED), - Arguments.of(StreamStatusRunState.INCOMPLETE, null)); - } - - @ParameterizedTest - @MethodSource("validPaginationMatrix") - void testListSuccessful(final Pagination pagination) { - when(handler.listStreamStatus(Mockito.any())) - .thenReturn(new StreamStatusReadList()); - - final var valid = Fixtures.validList() - .pagination(pagination); - - testEndpointStatus( - HttpRequest.POST( - PATH_LIST, - valid), - HttpStatus.OK); - } - - private static Stream validPaginationMatrix() { - return Stream.of( - Arguments.of(Fixtures.validPagination()), - Arguments.of(Fixtures.validPagination().rowOffset(30)), - Arguments.of(Fixtures.validPagination().pageSize(100).rowOffset(300)), - Arguments.of(Fixtures.validPagination().pageSize(5).rowOffset(10))); - } - - @ParameterizedTest - @MethodSource("invalidListPaginationMatrix") - void testListInvalidPagination(final Pagination invalidPagination) { - when(handler.listStreamStatus(Mockito.any())) - .thenReturn(new StreamStatusReadList()); - - final var invalid = Fixtures.validList() - .pagination(invalidPagination); - - testErrorEndpointStatus( - HttpRequest.POST( - PATH_LIST, - invalid), - HttpStatus.BAD_REQUEST); - } - - private static Stream invalidListPaginationMatrix() { - return Stream.of( - Arguments.of((Pagination) null), - Arguments.of(Fixtures.validPagination().pageSize(0)), - Arguments.of(Fixtures.validPagination().pageSize(-1)), - Arguments.of(Fixtures.validPagination().rowOffset(-1)), - Arguments.of(Fixtures.validPagination().pageSize(-1).rowOffset(-1)), - Arguments.of(Fixtures.validPagination().pageSize(0).rowOffset(-1)), - Arguments.of(Fixtures.validPagination().pageSize(10).rowOffset(23)), - Arguments.of(Fixtures.validPagination().pageSize(20).rowOffset(10)), - Arguments.of(Fixtures.validPagination().pageSize(100).rowOffset(50))); - } - - @Test - void testListPerRunStateSuccessful() { - final var req = new ConnectionIdRequestBody().connectionId(UUID.randomUUID()); - - when(handler.listStreamStatusPerRunState(req)) - .thenReturn(new StreamStatusReadList()); - - testEndpointStatus( - HttpRequest.POST( - PATH_LATEST_PER_RUN_STATE, - req), - HttpStatus.OK); - } - - static class Fixtures { - - static String testNamespace = "test_"; - static String testName = "table_1"; - static UUID workspaceId = UUID.randomUUID(); - static UUID connectionId = UUID.randomUUID(); - static Long jobId = ThreadLocalRandom.current().nextLong(); - static Long transitionedAtMs = System.currentTimeMillis(); - - static StreamStatusCreateRequestBody validCreate() { - return new StreamStatusCreateRequestBody() - .workspaceId(workspaceId) - .connectionId(connectionId) - .jobId(jobId) - 
.jobType(StreamStatusJobType.SYNC) - .attemptNumber(0) - .streamNamespace(testNamespace) - .streamName(testName) - .runState(StreamStatusRunState.PENDING) - .transitionedAt(transitionedAtMs); - } - - static StreamStatusUpdateRequestBody validUpdate() { - return new StreamStatusUpdateRequestBody() - .workspaceId(workspaceId) - .connectionId(connectionId) - .jobId(jobId) - .jobType(StreamStatusJobType.SYNC) - .attemptNumber(0) - .streamNamespace(testNamespace) - .streamName(testName) - .runState(StreamStatusRunState.PENDING) - .transitionedAt(transitionedAtMs) - .id(UUID.randomUUID()); - } - - static Pagination validPagination() { - return new Pagination() - .pageSize(10) - .rowOffset(0); - } - - static StreamStatusListRequestBody validList() { - return new StreamStatusListRequestBody() - .workspaceId(UUID.randomUUID()) - .jobId(ThreadLocalRandom.current().nextLong()) - .pagination(validPagination()); - } - - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/UserApiControllerTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/UserApiControllerTest.java deleted file mode 100644 index 81a7dde4d40..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/UserApiControllerTest.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.OrganizationIdRequestBody; -import io.airbyte.api.model.generated.OrganizationUserReadList; -import io.airbyte.api.model.generated.UserAuthIdRequestBody; -import io.airbyte.api.model.generated.UserEmailRequestBody; -import io.airbyte.api.model.generated.UserGetOrCreateByAuthIdResponse; -import io.airbyte.api.model.generated.UserIdRequestBody; -import io.airbyte.api.model.generated.UserRead; -import io.airbyte.api.model.generated.UserUpdate; -import io.airbyte.api.model.generated.UserWithPermissionInfoReadList; -import io.airbyte.api.model.generated.WorkspaceIdRequestBody; -import io.airbyte.api.model.generated.WorkspaceUserAccessInfoReadList; -import io.airbyte.api.model.generated.WorkspaceUserReadList; -import io.airbyte.commons.json.Jsons; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class UserApiControllerTest extends BaseControllerTest { - - @Test - void testGetUser() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(userHandler.getUser(Mockito.any())) - .thenReturn(new UserRead()); - final String path = "/api/v1/users/get"; - testEndpointStatus( - HttpRequest.POST(path, new UserIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testGetUserByAuthId() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(userHandler.getUserByAuthId(Mockito.any())) - .thenReturn(new UserRead()); - final String path = "/api/v1/users/get_by_auth_id"; - testEndpointStatus( - HttpRequest.POST(path, new UserAuthIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testGetUserByEmail() throws JsonValidationException, 
ConfigNotFoundException, IOException { - Mockito.when(userHandler.getUserByEmail(Mockito.any())) - .thenReturn(new UserRead()); - final String path = "/api/v1/users/get_by_email"; - testEndpointStatus( - HttpRequest.POST(path, new UserEmailRequestBody()), - HttpStatus.OK); - } - - @Test - void testDeleteUser() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.doNothing().when(userHandler).deleteUser(Mockito.any()); - final String path = "/api/v1/users/delete"; - testEndpointStatus( - HttpRequest.POST(path, new UserIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testUpdateUser() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(userHandler.updateUser(Mockito.any())) - .thenReturn(new UserRead()); - final String path = "/api/v1/users/update"; - testEndpointStatus( - HttpRequest.POST(path, new UserUpdate()), - HttpStatus.OK); - } - - @Test - void testListUsersInOrganization() throws IOException, ConfigNotFoundException { - Mockito.when(userHandler.listUsersInOrganization(Mockito.any())) - .thenReturn(new OrganizationUserReadList()); - final String path = "/api/v1/users/list_by_organization_id"; - testEndpointStatus( - HttpRequest.POST(path, new OrganizationIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testListUsersInWorkspace() throws Exception { - Mockito.when(userHandler.listUsersInWorkspace(Mockito.any())) - .thenReturn(new WorkspaceUserReadList()); - final String path = "/api/v1/users/list_by_workspace_id"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testListInstanceAdminUsers() throws Exception { - Mockito.when(userHandler.listInstanceAdminUsers()) - .thenReturn(new UserWithPermissionInfoReadList()); - final String path = "/api/v1/users/list_instance_admin"; - testEndpointStatus( - HttpRequest.POST(path, Jsons.emptyObject()), - HttpStatus.OK); - } - - @Test - void testGetOrCreateUser() throws Exception { - Mockito.when(userHandler.getOrCreateUserByAuthId(Mockito.any())) - .thenReturn(new UserGetOrCreateByAuthIdResponse().userRead(new UserRead())); - final String path = "/api/v1/users/get_or_create_by_auth_id"; - testEndpointStatus( - HttpRequest.POST(path, new UserAuthIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testListAccessInfoByWorkspaceId() throws Exception { - Mockito.when(userHandler.listAccessInfoByWorkspaceId(Mockito.any())) - .thenReturn(new WorkspaceUserAccessInfoReadList()); - final String path = "/api/v1/users/list_access_info_by_workspace_id"; - testEndpointStatus( - HttpRequest.POST(path, Jsons.serialize(new WorkspaceIdRequestBody())), - HttpStatus.OK); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java deleted file mode 100644 index 3856b6135e6..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/WebBackendApiTest.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import io.airbyte.api.model.generated.ConnectionStateType; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.WebBackendCheckUpdatesRead; -import io.airbyte.api.model.generated.WebBackendConnectionRead; -import io.airbyte.api.model.generated.WebBackendConnectionReadList; -import io.airbyte.api.model.generated.WebBackendConnectionRequestBody; -import io.airbyte.api.model.generated.WebBackendGeographiesListResult; -import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult; -import io.airbyte.api.problems.throwable.generated.ForbiddenProblem; -import io.airbyte.commons.server.authorization.ApiAuthorizationHelper; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Primary; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import jakarta.inject.Singleton; -import java.io.IOException; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class WebBackendApiTest extends BaseControllerTest { - - private ApiAuthorizationHelper apiAuthorizationHelper; - - // Due to some strange interaction between Micronaut 3, Java, and Kotlin, the only way to - // mock this Kotlin dependency is to annotate it with @Bean instead of @MockBean, and to - // declare it here instead of within the BaseControllerTest. May be able to move it - // back to BaseControllerTest and use @MockBean after we upgrade to Micronaut 4. 
- @Singleton - @Primary - ApiAuthorizationHelper mmAirbyteApiAuthorizationHelper() { - return apiAuthorizationHelper; - } - - @BeforeEach - void setup() { - apiAuthorizationHelper = Mockito.mock(ApiAuthorizationHelper.class); - } - - @Test - void testGetStateType() throws IOException { - Mockito.when(webBackendConnectionsHandler.getStateType(Mockito.any())) - .thenReturn(ConnectionStateType.STREAM); - final String path = "/api/v1/web_backend/state/get_type"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testWebBackendCheckUpdates() { - Mockito.when(webBackendCheckUpdatesHandler.checkUpdates()) - .thenReturn(new WebBackendCheckUpdatesRead()); - final String path = "/api/v1/web_backend/check_updates"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testWebBackendCreateConnection() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { - Mockito.when(webBackendConnectionsHandler.webBackendCreateConnection(Mockito.any())) - .thenReturn(new WebBackendConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/web_backend/connections/create"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testWebBackendGetConnection() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { - final String path = "/api/v1/web_backend/connections/get"; - - Mockito.when(webBackendConnectionsHandler.webBackendGetConnection(Mockito.any())) - .thenReturn(new WebBackendConnectionRead()) // first call that makes it here succeeds - .thenReturn(new WebBackendConnectionRead()) // second call that makes it here succeeds - .thenThrow(new ConfigNotFoundException("", "")); // third call that makes it here 404s - - // This only impacts calls where withRefreshCatalog(true) is present - Mockito - .doNothing() // first call that makes it here passes auth check - .doNothing() // second call that makes it here passes auth check but 404s - .doThrow(new ForbiddenProblem()) // third call fails auth check and 403s - .when(apiAuthorizationHelper).checkWorkspacePermissions(Mockito.anyString(), Mockito.any(), Mockito.any()); - - // first call doesn't activate checkWorkspacePermissions because withRefreshedCatalog is false - testEndpointStatus( - HttpRequest.POST(path, new WebBackendConnectionRequestBody()), - HttpStatus.OK); - - // second call activates checkWorkspacePermissions because withRefreshedCatalog is true, and passes - // the check - testEndpointStatus( - HttpRequest.POST(path, new WebBackendConnectionRequestBody().connectionId(UUID.randomUUID()).withRefreshedCatalog(true)), - HttpStatus.OK); - - // third call activates checkWorkspacePermissions because withRefreshedCatalog is true, passes it, - // but then fails on the 404 - testErrorEndpointStatus( - HttpRequest.POST(path, new WebBackendConnectionRequestBody().connectionId(UUID.randomUUID()).withRefreshedCatalog(true)), - HttpStatus.NOT_FOUND); - - // fourth call activates checkWorkspacePermissions because withRefreshedCatalog is true, but fails - // the check, so 403s - testErrorEndpointStatus( - HttpRequest.POST(path, new 
WebBackendConnectionRequestBody().connectionId(UUID.randomUUID()).withRefreshedCatalog(true)), - HttpStatus.FORBIDDEN); - } - - @Test - void testWebBackendGetWorkspaceState() throws IOException { - Mockito.when(webBackendConnectionsHandler.getWorkspaceState(Mockito.any())) - .thenReturn(new WebBackendWorkspaceStateResult()); - final String path = "/api/v1/web_backend/workspace/state"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testWebBackendListConnectionsForWorkspace() - throws IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException, ConfigNotFoundException { - Mockito.when(webBackendConnectionsHandler.webBackendListConnectionsForWorkspace(Mockito.any())) - .thenReturn(new WebBackendConnectionReadList()); - final String path = "/api/v1/web_backend/connections/list"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testWebBackendListGeographies() { - Mockito.when(webBackendGeographiesHandler.listGeographiesOSS()) - .thenReturn(new WebBackendGeographiesListResult()); - final String path = "/api/v1/web_backend/geographies/list"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testWebBackendUpdateConnection() - throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { - Mockito.when(webBackendConnectionsHandler.webBackendUpdateConnection(Mockito.any())) - .thenReturn(new WebBackendConnectionRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/web_backend/connections/update"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - -} diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/WorkspaceApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/WorkspaceApiTest.java deleted file mode 100644 index bd70b6b8044..00000000000 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/WorkspaceApiTest.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.server.apis; - -import static org.mockito.ArgumentMatchers.anyBoolean; - -import io.airbyte.api.model.generated.PermissionCheckRead; -import io.airbyte.api.model.generated.PermissionCheckRead.StatusEnum; -import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody; -import io.airbyte.api.model.generated.SourceIdRequestBody; -import io.airbyte.api.model.generated.WorkspaceCreate; -import io.airbyte.api.model.generated.WorkspaceCreateWithId; -import io.airbyte.api.model.generated.WorkspaceRead; -import io.airbyte.api.model.generated.WorkspaceReadList; -import io.airbyte.api.model.generated.WorkspaceUpdateOrganization; -import io.airbyte.config.AuthenticatedUser; -import io.airbyte.data.exceptions.ConfigNotFoundException; -import io.airbyte.validation.json.JsonValidationException; -import io.micronaut.context.annotation.Requires; -import io.micronaut.context.env.Environment; -import io.micronaut.http.HttpRequest; -import io.micronaut.http.HttpStatus; -import io.micronaut.test.extensions.junit5.annotation.MicronautTest; -import java.io.IOException; -import java.util.UUID; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@MicronautTest -@Requires(env = {Environment.TEST}) -@SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") -class WorkspaceApiTest extends BaseControllerTest { - - @Test - void testCreateWorkspace() throws JsonValidationException, IOException, ConfigNotFoundException { - Mockito.when(permissionHandler.checkPermissions(Mockito.any())) - .thenReturn(new PermissionCheckRead().status(StatusEnum.SUCCEEDED)) // first call with an orgId succeeds - .thenReturn(new PermissionCheckRead().status(StatusEnum.FAILED)); // second call with an orgId fails - - Mockito.when(workspacesHandler.createWorkspace(Mockito.any())) - .thenReturn(new WorkspaceRead()); - - Mockito.when(currentUserService.getCurrentUser()).thenReturn(new AuthenticatedUser()); - - final String path = "/api/v1/workspaces/create"; - - // no org id, expect 200 - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - - // org id present, permission check succeeds, expect 200 - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceCreate().organizationId(UUID.randomUUID())), - HttpStatus.OK); - - // org id present, permission check fails, expect 403 - testErrorEndpointStatus( - HttpRequest.POST(path, new WorkspaceCreate().organizationId(UUID.randomUUID())), - HttpStatus.FORBIDDEN); - } - - @Test - void testCreateWorkspaceIfNotExist() throws JsonValidationException, IOException, ConfigNotFoundException { - Mockito.when(permissionHandler.checkPermissions(Mockito.any())) - .thenReturn(new PermissionCheckRead().status(StatusEnum.SUCCEEDED)) // first call with an orgId succeeds - .thenReturn(new PermissionCheckRead().status(StatusEnum.FAILED)); // second call with an orgId fails - - Mockito.when(workspacesHandler.createWorkspaceIfNotExist(Mockito.any())) - .thenReturn(new WorkspaceRead()); - - Mockito.when(currentUserService.getCurrentUser()).thenReturn(new AuthenticatedUser()); - - final String path = "/api/v1/workspaces/create_if_not_exist"; - - // no org id, expect 200 - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceCreateWithId()), - HttpStatus.OK); - - // org id present, permission check succeeds, expect 200 - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceCreateWithId().organizationId(UUID.randomUUID())), - HttpStatus.OK); - - // org id present, permission check fails, expect 403 - testErrorEndpointStatus( - 
HttpRequest.POST(path, new WorkspaceCreateWithId().organizationId(UUID.randomUUID())), - HttpStatus.FORBIDDEN); - } - - @Test - void testDeleteWorkspace() - throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.config.persistence.ConfigNotFoundException { - Mockito.doNothing() - .doThrow(new ConfigNotFoundException("", "")) - .when(workspacesHandler).deleteWorkspace(Mockito.any()); - final String path = "/api/v1/workspaces/delete"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NO_CONTENT); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(workspacesHandler.getWorkspace(Mockito.any())) - .thenReturn(new WorkspaceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/workspaces/get"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetBySlugWorkspace() throws ConfigNotFoundException, IOException { - Mockito.when(workspacesHandler.getWorkspaceBySlug(Mockito.any())) - .thenReturn(new WorkspaceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/workspaces/get_by_slug"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testListWorkspace() throws JsonValidationException, IOException { - Mockito.when(workspacesHandler.listWorkspaces()) - .thenReturn(new WorkspaceReadList()); - final String path = "/api/v1/workspaces/list"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - } - - @Test - void testUpdateWorkspace() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(workspacesHandler.updateWorkspace(Mockito.any())) - .thenReturn(new WorkspaceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/workspaces/update"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpdateWorkspaceOrganization() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(workspacesHandler.updateWorkspaceOrganization(Mockito.any())) - .thenReturn(new WorkspaceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/workspaces/update_organization"; - testEndpointStatus( - HttpRequest.POST(path, new WorkspaceUpdateOrganization()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new WorkspaceUpdateOrganization()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpdateWorkspaceFeedback() throws IOException, ConfigNotFoundException { - Mockito.doNothing() - .doThrow(new ConfigNotFoundException("", "")) - .when(workspacesHandler).setFeedbackDone(Mockito.any()); - final String path = "/api/v1/workspaces/tag_feedback_status_as_done"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - 
HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testUpdateWorkspaceName() throws JsonValidationException, ConfigNotFoundException, IOException { - Mockito.when(workspacesHandler.updateWorkspaceName(Mockito.any())) - .thenReturn(new WorkspaceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/workspaces/update_name"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceDefinitionIdRequestBody()), - HttpStatus.NOT_FOUND); - } - - @Test - void testGetWorkspaceByConnectionId() throws ConfigNotFoundException { - Mockito.when(workspacesHandler.getWorkspaceByConnectionId(Mockito.any(), anyBoolean())) - .thenReturn(new WorkspaceRead()) - .thenThrow(new ConfigNotFoundException("", "")); - final String path = "/api/v1/workspaces/get_by_connection_id"; - testEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.OK); - testErrorEndpointStatus( - HttpRequest.POST(path, new SourceIdRequestBody()), - HttpStatus.NOT_FOUND); - } - -} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/AssertEndpoint.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/AssertEndpoint.kt new file mode 100644 index 00000000000..c8a0f6ed136 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/AssertEndpoint.kt @@ -0,0 +1,49 @@ + +@file:JvmName("AssertEndpoint") + +package io.airbyte.server + +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.exceptions.HttpClientResponseException +import org.junit.jupiter.api.Assertions.assertEquals + +/** + * Extension function which converts a [HttpRequest] into a [HttpStatus]. + * + * @receiver HttpClient is the micronaut http client + * @param request is the [HttpRequest] to send to the [HttpClient] + * @return the [HttpStatus] of the [request], or an underlying Exception + * + * If you're looking for a way to handle the [HttpClientResponseException] that may be returned from the exchange call, see [statusException]. + */ +internal fun HttpClient.status(request: HttpRequest): HttpStatus = toBlocking().exchange(request).status + +/** + * Extension function which converts a [HttpRequest] into a [HttpStatus]. Additionally, handles the [HttpClientResponseException] exception + * converting it to the underlying [HttpStatus] which it contains. + * + * @receiver HttpClient is the micronaut http client + * @param request is the [HttpRequest] to send to the [HttpClient] + * @return the [HttpStatus] of the [request], or an underlying Exception + */ +internal fun HttpClient.statusException(request: HttpRequest): HttpStatus = + runCatching { + toBlocking().exchange(request).status + }.fold( + onSuccess = { it }, + onFailure = { + if (it is HttpClientResponseException) { + it.status + } else { + throw Exception("unsupported response exception class ${it::class.java}") + } + }, + ) + +/** assertStatus compares two [HttpStatus] values. 
*/ +fun assertStatus( + expected: HttpStatus, + actual: HttpStatus, +): Unit = assertEquals(expected, actual) diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/ActorDefinitionVersionApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/ActorDefinitionVersionApiControllerTest.kt new file mode 100644 index 00000000000..7948687e949 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/ActorDefinitionVersionApiControllerTest.kt @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.ActorDefinitionVersionRead +import io.airbyte.api.model.generated.DestinationIdRequestBody +import io.airbyte.api.model.generated.SourceIdRequestBody +import io.airbyte.commons.server.handlers.ActorDefinitionVersionHandler +import io.airbyte.config.persistence.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class ActorDefinitionVersionApiControllerTest { + @Inject + lateinit var actorDefinitionVersionHandler: ActorDefinitionVersionHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(ActorDefinitionVersionHandler::class) + fun actorDefinitionVersionHandler(): ActorDefinitionVersionHandler = mockk() + + @Test + fun testGetActorDefinitionForSource() { + every { actorDefinitionVersionHandler.getActorDefinitionVersionForSourceId(any()) } returns + ActorDefinitionVersionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/actor_definition_versions/get_for_source" + + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testGetActorDefinitionForDestination() { + every { actorDefinitionVersionHandler.getActorDefinitionVersionForDestinationId(any()) } returns ActorDefinitionVersionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/actor_definition_versions/get_for_destination" + + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationIdRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/AttemptApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/AttemptApiControllerTest.kt new file mode 100644 index 00000000000..b5a27d759d0 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/AttemptApiControllerTest.kt @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.InternalOperationResult +import io.airbyte.api.model.generated.SaveStatsRequestBody +import io.airbyte.commons.server.handlers.AttemptHandler +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class AttemptApiControllerTest { + @Inject + lateinit var attemptHandler: AttemptHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(AttemptHandler::class) + fun attemptHandler(): AttemptHandler = mockk() + + @Test + fun testSaveState() { + every { attemptHandler.saveStats(any()) } returns InternalOperationResult() + + val path = "/api/v1/attempt/save_stats" + + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SaveStatsRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/ConnectionApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/ConnectionApiControllerTest.kt new file mode 100644 index 00000000000..f399ee139af --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/ConnectionApiControllerTest.kt @@ -0,0 +1,212 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.ConnectionCreate +import io.airbyte.api.model.generated.ConnectionIdRequestBody +import io.airbyte.api.model.generated.ConnectionRead +import io.airbyte.api.model.generated.ConnectionReadList +import io.airbyte.api.model.generated.ConnectionSearch +import io.airbyte.api.model.generated.ConnectionStream +import io.airbyte.api.model.generated.ConnectionStreamRequestBody +import io.airbyte.api.model.generated.ConnectionUpdate +import io.airbyte.api.model.generated.JobInfoRead +import io.airbyte.api.model.generated.WorkspaceIdRequestBody +import io.airbyte.commons.server.handlers.ConnectionsHandler +import io.airbyte.commons.server.handlers.MatchSearchHandler +import io.airbyte.commons.server.handlers.OperationsHandler +import io.airbyte.commons.server.handlers.SchedulerHandler +import io.airbyte.commons.server.services.ConnectionService +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.apis.ConnectionApiController +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.context.env.Environment +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import io.temporal.client.WorkflowClient +import jakarta.inject.Inject +import jakarta.validation.ConstraintViolationException +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test +import java.util.UUID + +@MicronautTest(environments = [Environment.TEST]) +internal 
class ConnectionApiControllerTest { + @Inject + lateinit var connectionApiController: ConnectionApiController + + @Inject + lateinit var schedulerHandler: SchedulerHandler + + @Inject + lateinit var connectionsHandler: ConnectionsHandler + + @Inject + lateinit var matchSearchHandler: MatchSearchHandler + + @Inject + lateinit var operationsHandler: OperationsHandler + + @Inject + lateinit var connectionService: ConnectionService + + @MockBean(SchedulerHandler::class) + fun schedulerHandler(): SchedulerHandler = mockk() + + @MockBean(WorkflowClient::class) + fun workflowClient(): WorkflowClient = mockk() + + @MockBean(ConnectionsHandler::class) + fun connectionHandler(): ConnectionsHandler = mockk() + + @MockBean(MatchSearchHandler::class) + fun matchSearchHandler(): MatchSearchHandler = mockk() + + @MockBean(OperationsHandler::class) + fun operationsHandler(): OperationsHandler = mockk() + + @MockBean(ConnectionService::class) + fun connectionService(): ConnectionService = mockk() + + @Inject + @Client("/") + lateinit var client: HttpClient + + // Disabled because this test somehow causes a failure in the `testConnectionStreamReset` test + // below with the following error: + // java.lang.IllegalStateException: No lock present for object: ConnectionService(#7) + @Disabled + @Test + fun testWarnOrDisableConnection() { + every { connectionService.warnOrDisableForConsecutiveFailures(any(), any()) } returns true andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/connections/auto_disable" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ConnectionUpdate()))) + } + + @Test + fun testCreateConnection() { + every { connectionsHandler.createConnection(any()) } returns ConnectionRead() andThenThrows + ConstraintViolationException(setOf()) + + val path = "/api/v1/connections/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionCreate()))) + assertStatus(HttpStatus.BAD_REQUEST, client.statusException(HttpRequest.POST(path, ConnectionCreate()))) + } + + @Test + fun testUpdateConnection() { + every { connectionsHandler.updateConnection(any(), any(), any()) } returns ConnectionRead() andThenThrows + ConstraintViolationException(setOf()) andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/connections/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionUpdate()))) + assertStatus(HttpStatus.BAD_REQUEST, client.statusException(HttpRequest.POST(path, ConnectionUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ConnectionUpdate()))) + } + + @Test + fun testListConnectionsForWorkspace() { + every { connectionsHandler.listConnectionsForWorkspace(any()) } returns ConnectionReadList() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/connections/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testListAllConnectionsForWorkspace() { + every { connectionsHandler.listAllConnectionsForWorkspace(any()) } returns ConnectionReadList() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/connections/list_all" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + 
assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testSearchConnections() { + every { matchSearchHandler.searchConnections(any()) } returns ConnectionReadList() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/connections/search" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionSearch()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ConnectionSearch()))) + } + + @Test + fun testGetConnection() { + every { connectionsHandler.getConnection(any()) } returns ConnectionRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/connections/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ConnectionIdRequestBody()))) + } + + @Test + fun testDeleteConnection() { + every { operationsHandler.deleteOperationsForConnection(any()) } returns Unit + every { connectionsHandler.deleteConnection(any()) } returns Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/connections/delete" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, ConnectionIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ConnectionIdRequestBody()))) + } + + @Test + fun testSyncConnection() { + every { schedulerHandler.syncConnection(any()) } returns JobInfoRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/connections/sync" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ConnectionUpdate()))) + } + + @Test + fun testResetConnection() { + every { schedulerHandler.resetConnection(any()) } returns JobInfoRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/connections/reset" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ConnectionIdRequestBody()))) + } + + @Test + fun testConnectionStreamReset() { + val connectionId = UUID.randomUUID() + + val streamName = "tableA" + val streamNamespace = "schemaA" + + val connectionStream = + ConnectionStream() + .streamName(streamName) + .streamNamespace(streamNamespace) + + val connectionStreamRequestBody = + ConnectionStreamRequestBody() + .connectionId(connectionId) + .streams(listOf(connectionStream)) + + val expectedJobInfoRead = JobInfoRead() + + every { schedulerHandler.resetConnectionStream(connectionStreamRequestBody) } returns expectedJobInfoRead + + val jobInfoRead = connectionApiController.resetConnectionStream(connectionStreamRequestBody) + Assertions.assertEquals(expectedJobInfoRead, jobInfoRead) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DeploymentMetadataApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DeploymentMetadataApiControllerTest.kt new file mode 100644 index 00000000000..48bdcd8ae6d --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DeploymentMetadataApiControllerTest.kt @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.DeploymentMetadataRead +import io.airbyte.commons.server.handlers.DeploymentMetadataHandler +import io.airbyte.config.Configs +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.micronaut.context.annotation.Requires +import io.micronaut.context.env.Environment +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.util.UUID + +@MicronautTest +@Requires(env = [Environment.TEST]) +internal class DeploymentMetadataApiControllerTest { + @Inject + lateinit var deploymentMetadataHandler: DeploymentMetadataHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(DeploymentMetadataHandler::class) + fun deploymentMetadataHandler(): DeploymentMetadataHandler = mockk() + + @Test + fun testFetchDeploymentMetadata() { + val deploymentMetadataRead = + DeploymentMetadataRead() + .id(UUID.randomUUID()) + .mode(Configs.DeploymentMode.OSS.name) + .version("0.2.3") + + every { deploymentMetadataHandler.deploymentMetadata } returns deploymentMetadataRead + + val path = "/api/v1/deployment/metadata" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, null))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationApiControllerTest.kt new file mode 100644 index 00000000000..0700cbfec46 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationApiControllerTest.kt @@ -0,0 +1,143 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.CheckConnectionRead +import io.airbyte.api.model.generated.DestinationCloneRequestBody +import io.airbyte.api.model.generated.DestinationCreate +import io.airbyte.api.model.generated.DestinationIdRequestBody +import io.airbyte.api.model.generated.DestinationRead +import io.airbyte.api.model.generated.DestinationReadList +import io.airbyte.api.model.generated.DestinationSearch +import io.airbyte.api.model.generated.DestinationUpdate +import io.airbyte.api.model.generated.WorkspaceIdRequestBody +import io.airbyte.commons.server.handlers.DestinationHandler +import io.airbyte.commons.server.handlers.SchedulerHandler +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import jakarta.validation.ConstraintViolationException +import org.junit.jupiter.api.Test + +@MicronautTest +internal class DestinationApiControllerTest { + @Inject + lateinit var schedulerHandler: SchedulerHandler + + @Inject + lateinit var destinationHandler: DestinationHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(SchedulerHandler::class) + fun scheduler(): SchedulerHandler = mockk() + + @MockBean(DestinationHandler::class) + fun destinationHandler(): DestinationHandler = mockk() + + @Test + fun testCheckConnectionToDestination() { + every { schedulerHandler.checkDestinationConnectionFromDestinationId(any()) } returns CheckConnectionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/check_connection" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationIdRequestBody()))) + } + + @Test + fun testCheckConnectionToDestinationForUpdate() { + every { schedulerHandler.checkDestinationConnectionFromDestinationIdForUpdate(any()) } returns CheckConnectionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/check_connection_for_update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationUpdate()))) + } + + @Test + fun testCloneDestination() { + every { destinationHandler.cloneDestination(any()) } returns DestinationRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/clone" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationCloneRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationCloneRequestBody()))) + } + + @Test + fun testCreateDestination() { + every { destinationHandler.createDestination(any()) } returns DestinationRead() andThenThrows ConstraintViolationException(setOf()) + + val path = "/api/v1/destinations/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationCreate()))) + assertStatus(HttpStatus.BAD_REQUEST, 
client.statusException(HttpRequest.POST(path, DestinationCreate()))) + } + + @Test + fun testDeleteDestination() { + every { destinationHandler.deleteDestination(any()) } returns Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/delete" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, DestinationIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationIdRequestBody()))) + } + + @Test + fun testGetDestination() { + every { destinationHandler.getDestination(any()) } returns DestinationRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationIdRequestBody()))) + } + + @Test + fun testListDestination() { + every { destinationHandler.listDestinationsForWorkspace(any()) } returns DestinationReadList() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testSearchDestination() { + every { destinationHandler.searchDestinations(any()) } returns DestinationReadList() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/search" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationSearch()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationSearch()))) + } + + @Test + fun testUpdateDestination() { + every { destinationHandler.updateDestination(any()) } returns DestinationRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationUpdate()))) + } + + @Test + fun testUpgradeDestinationVersion() { + every { destinationHandler.upgradeDestinationVersion(any()) } returns Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destinations/upgrade_version" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, DestinationIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationIdRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationDefinitionApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationDefinitionApiControllerTest.kt new file mode 100644 index 00000000000..73631b9baf2 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationDefinitionApiControllerTest.kt @@ -0,0 +1,188 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.ActorDefinitionIdWithScope +import io.airbyte.api.model.generated.CustomDestinationDefinitionCreate +import io.airbyte.api.model.generated.DestinationDefinitionIdRequestBody +import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId +import io.airbyte.api.model.generated.DestinationDefinitionRead +import io.airbyte.api.model.generated.DestinationDefinitionReadList +import io.airbyte.api.model.generated.DestinationDefinitionUpdate +import io.airbyte.api.model.generated.PrivateDestinationDefinitionRead +import io.airbyte.api.model.generated.PrivateDestinationDefinitionReadList +import io.airbyte.api.model.generated.WorkspaceIdRequestBody +import io.airbyte.commons.server.errors.ApplicationErrorKnownException +import io.airbyte.commons.server.handlers.DestinationDefinitionsHandler +import io.airbyte.commons.server.validation.ActorDefinitionAccessValidator +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.util.UUID + +@MicronautTest(rebuildContext = true) +internal class DestinationDefinitionApiControllerTest { + @Inject + lateinit var destinationDefinitionsHandler: DestinationDefinitionsHandler + + @Inject + lateinit var actorDefinitionAccessValidator: ActorDefinitionAccessValidator + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(DestinationDefinitionsHandler::class) + fun destinationDefinitionsHandler(): DestinationDefinitionsHandler = mockk() + + @MockBean(ActorDefinitionAccessValidator::class) + fun actorDefinitionAccessValidator(): ActorDefinitionAccessValidator = mockk() + + @Test + fun testCheckConnectionToDestination() { + every { actorDefinitionAccessValidator.validateWriteAccess(any()) } returns Unit + every { destinationDefinitionsHandler.createCustomDestinationDefinition(any()) } returns DestinationDefinitionRead() + + val path = "/api/v1/destination_definitions/create_custom" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, CustomDestinationDefinitionCreate()))) + } + + @Test + fun testDeleteDestinationDefinition() { + every { actorDefinitionAccessValidator.validateWriteAccess(any()) } returns Unit + every { destinationDefinitionsHandler.deleteDestinationDefinition(any()) } returns Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destination_definitions/delete" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, DestinationDefinitionIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationDefinitionIdRequestBody()))) + } + + @Test + fun testDeleteDestinationDefinitionNoWriteAccess() { + val destinationDefinitionId = UUID.randomUUID() + every { actorDefinitionAccessValidator.validateWriteAccess(destinationDefinitionId) } throws ApplicationErrorKnownException("invalid") + + val path = "/api/v1/destination_definitions/delete" + assertStatus( + HttpStatus.UNPROCESSABLE_ENTITY, + client.statusException( + 
HttpRequest.POST( + path, + DestinationDefinitionIdRequestBody().destinationDefinitionId(destinationDefinitionId), + ), + ), + ) + } + + @Test + fun testGetDestinationDefinition() { + every { destinationDefinitionsHandler.getDestinationDefinition(any()) } returns DestinationDefinitionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/destination_definitions/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationDefinitionIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationDefinitionIdRequestBody()))) + } + + @Test + fun testGetDestinationDefinitionForWorkspace() { + every { destinationDefinitionsHandler.getDestinationDefinitionForWorkspace(any()) } returns DestinationDefinitionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/destination_definitions/get_for_workspace" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationDefinitionIdWithWorkspaceId()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationDefinitionIdWithWorkspaceId()))) + } + + @Test + fun testGrantDestinationDefinitionToWorkspace() { + every { + destinationDefinitionsHandler.grantDestinationDefinitionToWorkspaceOrOrganization(any()) + } returns PrivateDestinationDefinitionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/destination_definitions/grant_definition" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ActorDefinitionIdWithScope()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ActorDefinitionIdWithScope()))) + } + + @Test + fun testListDestinationDefinitions() { + every { destinationDefinitionsHandler.listDestinationDefinitions() } returns DestinationDefinitionReadList() + + val path = "/api/v1/destination_definitions/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ""))) + } + + @Test + fun testListDestinationDefinitionsForWorkspace() { + every { destinationDefinitionsHandler.listDestinationDefinitionsForWorkspace(any()) } returns DestinationDefinitionReadList() + + val path = "/api/v1/destination_definitions/list_for_workspace" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testListLatestDestinationDefinitions() { + every { destinationDefinitionsHandler.listLatestDestinationDefinitions() } returns DestinationDefinitionReadList() + + val path = "/api/v1/destination_definitions/list_latest" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ""))) + } + + @Test + fun testListPrivateDestinationDefinitions() { + every { destinationDefinitionsHandler.listPrivateDestinationDefinitions(any()) } returns PrivateDestinationDefinitionReadList() + val path = "/api/v1/destination_definitions/list_private" + + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testRevokeDestinationDefinitionFromWorkspace() { + every { destinationDefinitionsHandler.revokeDestinationDefinition(any()) } returns Unit + + val path = "/api/v1/destination_definitions/revoke_definition" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ActorDefinitionIdWithScope()))) + } + + @Test + fun testUpdateDestinationDefinition() { + every { actorDefinitionAccessValidator.validateWriteAccess(any()) } returns Unit + every { 
destinationDefinitionsHandler.updateDestinationDefinition(any()) } returns DestinationDefinitionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/destination_definitions/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationDefinitionUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationDefinitionUpdate()))) + } + + @Test + fun testUpdateDestinationDefinitionNoWriteAccess() { + val destinationDefinitionId = UUID.randomUUID() + every { actorDefinitionAccessValidator.validateWriteAccess(destinationDefinitionId) } throws ApplicationErrorKnownException("invalid") + + val path = "/api/v1/destination_definitions/update" + assertStatus( + HttpStatus.UNPROCESSABLE_ENTITY, + client.statusException( + HttpRequest.POST( + path, + DestinationDefinitionUpdate().destinationDefinitionId(destinationDefinitionId), + ), + ), + ) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationDefinitionSpecificationApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationDefinitionSpecificationApiControllerTest.kt new file mode 100644 index 00000000000..8d7cd3d8745 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationDefinitionSpecificationApiControllerTest.kt @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.DestinationDefinitionIdWithWorkspaceId +import io.airbyte.api.model.generated.DestinationDefinitionSpecificationRead +import io.airbyte.commons.server.handlers.ConnectorDefinitionSpecificationHandler +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class DestinationDefinitionSpecificationApiControllerTest { + @Inject + lateinit var connectorDefinitionSpecificationHandler: ConnectorDefinitionSpecificationHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(ConnectorDefinitionSpecificationHandler::class) + fun connectorDefinitionSpecificationHandler(): ConnectorDefinitionSpecificationHandler = mockk() + + @Test + fun testCheckConnectionToDestination() { + every { + connectorDefinitionSpecificationHandler.getDestinationSpecification( + any(), + ) + } returns DestinationDefinitionSpecificationRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/destination_definition_specifications/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationDefinitionIdWithWorkspaceId()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationDefinitionIdWithWorkspaceId()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationOauthApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationOauthApiControllerTest.kt new file mode 100644 
index 00000000000..495b629b15f --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DestinationOauthApiControllerTest.kt @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.CompleteDestinationOAuthRequest +import io.airbyte.api.model.generated.CompleteOAuthResponse +import io.airbyte.api.model.generated.DestinationOauthConsentRequest +import io.airbyte.api.model.generated.OAuthConsentRead +import io.airbyte.api.model.generated.SetInstancewideDestinationOauthParamsRequestBody +import io.airbyte.commons.server.handlers.OAuthHandler +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.airbyte.validation.json.JsonValidationException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.io.IOException + +@MicronautTest +internal class DestinationOauthApiControllerTest { + @Inject + lateinit var oAuthHandler: OAuthHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(OAuthHandler::class) + fun oauthHandler(): OAuthHandler = mockk() + + @Test + @Throws(JsonValidationException::class, ConfigNotFoundException::class, IOException::class) + fun testCompleteDestinationOAuth() { + every { oAuthHandler.completeDestinationOAuth(any()) } returns CompleteOAuthResponse() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destination_oauths/complete_oauth" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, CompleteDestinationOAuthRequest()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, CompleteDestinationOAuthRequest()))) + } + + @Test + @Throws(JsonValidationException::class, ConfigNotFoundException::class, IOException::class) + fun testGetDestinationOAuthConsent() { + every { oAuthHandler.getDestinationOAuthConsent(any()) } returns OAuthConsentRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/destination_oauths/get_consent_url" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationOauthConsentRequest()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationOauthConsentRequest()))) + } + + @Test + @Throws(IOException::class) + fun testDeleteDestination() { + every { oAuthHandler.setDestinationInstancewideOauthParams(any()) } returns Unit + + val path = "/api/v1/destination_oauths/oauth_params/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SetInstancewideDestinationOauthParamsRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DiagnosticToolApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DiagnosticToolApiControllerTest.kt new file mode 100644 index 00000000000..9fa3ffba2ea --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/DiagnosticToolApiControllerTest.kt @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights 
reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.DiagnosticReportRequestBody +import io.airbyte.commons.server.handlers.DiagnosticToolHandler +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.fabric8.kubernetes.client.KubernetesClient +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.io.File + +@MicronautTest +internal class DiagnosticToolApiControllerTest { + @Inject + lateinit var diagnosticToolHandler: DiagnosticToolHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(KubernetesClient::class) + fun kubernetesClient(): KubernetesClient = mockk() + + @Test + fun testGenerateDiagnosticReport() { + val result = + File.createTempFile("test-diagnostic", "").also { + it.deleteOnExit() + } + every { diagnosticToolHandler.generateDiagnosticReport() } returns result + + val path = "/api/v1/diagnostic_tool/generate_report" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DiagnosticReportRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/HealthApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/HealthApiControllerTest.kt new file mode 100644 index 00000000000..6a1de2d7575 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/HealthApiControllerTest.kt @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.HealthCheckRead +import io.airbyte.commons.server.handlers.HealthCheckHandler +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class HealthApiControllerTest { + @Inject + lateinit var healthCheckHandler: HealthCheckHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(HealthCheckHandler::class) + fun healthCheckHandler(): HealthCheckHandler = mockk() + + @Test + fun testHealth() { + every { healthCheckHandler.health() } returns HealthCheckRead() + assertStatus(HttpStatus.OK, client.status(HttpRequest.GET("/api/v1/health"))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/HealthCheckControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/HealthCheckControllerTest.kt new file mode 100644 index 00000000000..44432070738 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/HealthCheckControllerTest.kt @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */
+package io.airbyte.server.apis.controllers
+
+import io.airbyte.api.model.generated.HealthCheckRead
+import io.airbyte.commons.server.handlers.HealthCheckHandler
+import io.airbyte.server.apis.HealthApiController
+import io.mockk.every
+import io.mockk.mockk
+import org.junit.jupiter.api.Assertions
+import org.junit.jupiter.api.Test
+
+internal class HealthCheckControllerTest {
+  @Test
+  fun testHealthCheck() {
+    val healthCheckHandler: HealthCheckHandler =
+      mockk {
+        every { health() } returns HealthCheckRead().available(false)
+      }
+
+    val configurationApi = HealthApiController(healthCheckHandler)
+    Assertions.assertFalse(configurationApi.healthCheck.available)
+  }
+}
diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/InstanceConfigurationApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/InstanceConfigurationApiControllerTest.kt
new file mode 100644
index 00000000000..5f124f04506
--- /dev/null
+++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/InstanceConfigurationApiControllerTest.kt
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */
+package io.airbyte.server.apis.controllers
+
+import io.airbyte.api.model.generated.InstanceConfigurationResponse
+import io.airbyte.commons.server.handlers.InstanceConfigurationHandler
+import io.airbyte.server.assertStatus
+import io.airbyte.server.status
+import io.micronaut.http.HttpRequest
+import io.micronaut.http.HttpStatus
+import io.micronaut.http.client.HttpClient
+import io.micronaut.http.client.annotation.Client
+import io.micronaut.test.annotation.MockBean
+import io.micronaut.test.extensions.junit5.annotation.MicronautTest
+import io.mockk.every
+import io.mockk.mockk
+import jakarta.inject.Inject
+import org.junit.jupiter.api.Test
+
+private const val PATH: String = "/api/v1/instance_configuration"
+
+@MicronautTest
+internal class InstanceConfigurationApiControllerTest {
+  @Inject
+  lateinit var instanceConfigurationHandler: InstanceConfigurationHandler
+
+  @Inject
+  @Client("/")
+  lateinit var client: HttpClient
+
+  @MockBean(InstanceConfigurationHandler::class)
+  fun instanceConfigurationHandler(): InstanceConfigurationHandler = mockk()
+
+  @Test
+  fun testGetInstanceConfiguration() {
+    every { instanceConfigurationHandler.instanceConfiguration } returns InstanceConfigurationResponse()
+
+    assertStatus(HttpStatus.OK, client.status(HttpRequest.GET(PATH)))
+  }
+
+  @Test
+  fun testSetupInstanceConfiguration() {
+    every { instanceConfigurationHandler.setupInstanceConfiguration(any()) } returns InstanceConfigurationResponse()
+
+    assertStatus(HttpStatus.OK, client.status(HttpRequest.POST("$PATH/setup", InstanceConfigurationResponse())))
+  }
+}
diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/JobRetryStatesApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/JobRetryStatesApiControllerTest.kt
new file mode 100644
index 00000000000..533aede425e
--- /dev/null
+++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/JobRetryStatesApiControllerTest.kt
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.JobIdRequestBody +import io.airbyte.api.model.generated.JobRetryStateRequestBody +import io.airbyte.api.model.generated.RetryStateRead +import io.airbyte.server.assertStatus +import io.airbyte.server.handlers.RetryStatesHandler +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.util.Optional +import java.util.UUID + +private const val PATH_BASE: String = "/api/v1/jobs/retry_states" +private const val PATH_GET: String = PATH_BASE + "/get" +private const val PATH_PUT: String = PATH_BASE + "/create_or_update" +private const val JOB_ID1: Long = 21891253 +private val jobIdRequestBody = JobIdRequestBody().id(JOB_ID1) +private val jobRetryStateRequestBody = + JobRetryStateRequestBody() + .id(UUID.randomUUID()) + .connectionId(UUID.randomUUID()) + .jobId(JOB_ID1) + .successiveCompleteFailures(8) + .totalCompleteFailures(12) + .successivePartialFailures(4) + .totalPartialFailures(42) + +@MicronautTest +internal class JobRetryStatesApiControllerTest { + @Inject + lateinit var retryStatesHandler: RetryStatesHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(RetryStatesHandler::class) + fun retryStatesHandler(): RetryStatesHandler = mockk() + + @Test + fun forJobFound() { + every { retryStatesHandler.getByJobId(any()) } returns Optional.of(RetryStateRead()) + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(PATH_GET, jobIdRequestBody))) + } + + @Test + fun forJobNotFound() { + every { retryStatesHandler.getByJobId(any()) } returns Optional.empty() + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(PATH_GET, jobIdRequestBody))) + } + + @Test + fun putForJob() { + every { retryStatesHandler.putByJobId(jobRetryStateRequestBody) } returns Unit + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(PATH_PUT, jobRetryStateRequestBody))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/JobsApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/JobsApiControllerTest.kt new file mode 100644 index 00000000000..e90f9af3cc5 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/JobsApiControllerTest.kt @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.JobCreate +import io.airbyte.api.model.generated.JobDebugInfoRead +import io.airbyte.api.model.generated.JobIdRequestBody +import io.airbyte.api.model.generated.JobInfoRead +import io.airbyte.commons.server.handlers.JobHistoryHandler +import io.airbyte.commons.server.handlers.SchedulerHandler +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class JobsApiControllerTest { + @Inject + lateinit var schedulerHandler: SchedulerHandler + + @Inject + lateinit var jobHistoryHandler: JobHistoryHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(SchedulerHandler::class) + fun schedulerHandler(): SchedulerHandler = mockk() + + @MockBean(JobHistoryHandler::class) + fun jobHistoryHandler(): JobHistoryHandler = mockk() + + @Test + fun testCreateJob() { + every { schedulerHandler.createJob(any()) } returns JobInfoRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/jobs/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, JobCreate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, JobCreate()))) + } + + @Test + fun testCancelJob() { + every { schedulerHandler.cancelJob(any()) } returns JobInfoRead() + + val path = "/api/v1/jobs/cancel" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, JobIdRequestBody()))) + } + + @Test + fun testGetJobDebugInfo() { + every { jobHistoryHandler.getJobDebugInfo(any()) } returns JobDebugInfoRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/jobs/get_debug_info" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, JobIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, JobIdRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/NotificationApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/NotificationApiControllerTest.kt new file mode 100644 index 00000000000..8b2671efea5 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/NotificationApiControllerTest.kt @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.NotificationRead +import io.airbyte.api.model.generated.NotificationTrigger +import io.airbyte.api.model.generated.NotificationWebhookConfigValidationRequestBody +import io.airbyte.api.model.generated.SlackNotificationConfiguration +import io.airbyte.commons.server.handlers.NotificationsHandler +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class NotificationApiControllerTest { + @Inject + lateinit var notificationsHandler: NotificationsHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(NotificationsHandler::class) + fun notificationsHandler(): NotificationsHandler = mockk() + + @Test + fun testTryWebhookApi() { + every { notificationsHandler.tryNotification(any(), any()) } returns NotificationRead().status(NotificationRead.StatusEnum.SUCCEEDED) + + val path = "/api/v1/notifications/try_webhook" + assertStatus( + HttpStatus.OK, + client.status( + HttpRequest.POST( + path, + NotificationWebhookConfigValidationRequestBody() + .notificationTrigger(NotificationTrigger.SYNC_SUCCESS) + .slackConfiguration(SlackNotificationConfiguration().webhook("webhook")), + ), + ), + ) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OperationApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OperationApiControllerTest.kt new file mode 100644 index 00000000000..1b44ad0071b --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OperationApiControllerTest.kt @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.CheckOperationRead +import io.airbyte.api.model.generated.ConnectionIdRequestBody +import io.airbyte.api.model.generated.OperationCreate +import io.airbyte.api.model.generated.OperationIdRequestBody +import io.airbyte.api.model.generated.OperationRead +import io.airbyte.api.model.generated.OperationReadList +import io.airbyte.api.model.generated.OperationUpdate +import io.airbyte.api.model.generated.OperatorConfiguration +import io.airbyte.commons.server.handlers.OperationsHandler +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.airbyte.validation.json.JsonValidationException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.io.IOException + +@MicronautTest +internal class OperationApiControllerTest { + @Inject + lateinit var operationsHandler: OperationsHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(OperationsHandler::class) + fun operationsHandler(): OperationsHandler = mockk() + + @Test + fun testCheckOperation() { + every { operationsHandler.checkOperation(any()) } returns CheckOperationRead() + + val path = "/api/v1/operations/check" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, OperatorConfiguration()))) + } + + @Test + fun testCreateOperation() { + every { operationsHandler.createOperation(any()) } returns OperationRead() + + val path = "/api/v1/operations/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, OperationCreate()))) + } + + @Test + fun testDeleteOperation() { + every { operationsHandler.deleteOperation(any()) } returns Unit + + val path = "/api/v1/operations/delete" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, OperationIdRequestBody()))) + } + + @Test + fun testGetOperation() { + every { operationsHandler.getOperation(any()) } returns OperationRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/operations/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, OperationIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, OperationIdRequestBody()))) + } + + @Test + @Throws(IOException::class, JsonValidationException::class, ConfigNotFoundException::class) + fun testListOperationsForConnection() { + every { operationsHandler.listOperationsForConnection(any()) } returns OperationReadList() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/operations/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, ConnectionIdRequestBody()))) + } + + @Test + @Throws(IOException::class, JsonValidationException::class, ConfigNotFoundException::class) + fun testUpdateOperation() { + every { operationsHandler.updateOperation(any()) } returns OperationRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/operations/update" + 
assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, OperationUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, OperationUpdate()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationApiControllerTest.kt new file mode 100644 index 00000000000..895e933813a --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationApiControllerTest.kt @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.OrganizationCreateRequestBody +import io.airbyte.api.model.generated.OrganizationIdRequestBody +import io.airbyte.api.model.generated.OrganizationRead +import io.airbyte.api.model.generated.OrganizationUpdateRequestBody +import io.airbyte.commons.server.handlers.OrganizationsHandler +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.micronaut.context.annotation.Requires +import io.micronaut.context.env.Environment +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +@Requires(env = [Environment.TEST]) +internal class OrganizationApiControllerTest { + @Inject + lateinit var organizationsHandler: OrganizationsHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(OrganizationsHandler::class) + fun organizationsHandler(): OrganizationsHandler = mockk() + + @Test + fun testGetOrganization() { + every { organizationsHandler.getOrganization(any()) } returns OrganizationRead() + + val path = "/api/v1/organizations/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, OrganizationIdRequestBody()))) + } + + @Test + @Throws(Exception::class) + fun testUpdateOrganization() { + every { organizationsHandler.updateOrganization(any()) } returns OrganizationRead() + + val path = "/api/v1/organizations/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, OrganizationUpdateRequestBody()))) + } + + @Test + @Throws(Exception::class) + fun testCreateOrganization() { + every { organizationsHandler.createOrganization(any()) } returns OrganizationRead() + + val path = "/api/v1/organizations/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, OrganizationCreateRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigControllerTest.kt index 80d314ddca8..5877c7a0ee8 100644 --- a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigControllerTest.kt +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigControllerTest.kt @@ -2,8 +2,8 @@ package io.airbyte.server.apis.controllers import io.airbyte.api.model.generated.OrganizationPaymentConfigRead import io.airbyte.api.problems.throwable.generated.ResourceNotFoundProblem +import 
io.airbyte.commons.server.services.OrganizationService
import io.airbyte.data.services.OrganizationPaymentConfigService
-import io.airbyte.data.services.OrganizationService
import io.kotest.assertions.throwables.shouldThrow
import io.mockk.every
import io.mockk.mockk
@@ -11,15 +11,17 @@ import org.junit.jupiter.api.BeforeEach
import org.junit.jupiter.api.Test
import java.util.Optional
import java.util.UUID
+import io.airbyte.data.services.OrganizationService as OrganizationRepository

class OrganizationPaymentConfigControllerTest {
  private var organizationService = mockk<OrganizationService>()
+  private var organizationRepository = mockk<OrganizationRepository>()
  private var organizationPaymentConfigService = mockk<OrganizationPaymentConfigService>()
  private lateinit var controller: OrganizationPaymentConfigController

  @BeforeEach
  fun setup() {
-    controller = OrganizationPaymentConfigController(organizationPaymentConfigService, organizationService)
+    controller = OrganizationPaymentConfigController(organizationService, organizationPaymentConfigService, organizationRepository)
  }

  @Test
@@ -43,7 +45,7 @@ class OrganizationPaymentConfigControllerTest {
  @Test
  fun `invalid organization id should fail saving payment config`() {
    val orgId = UUID.randomUUID()
-    every { organizationService.getOrganization(orgId) } returns Optional.empty()
+    every { organizationRepository.getOrganization(orgId) } returns Optional.empty()
    shouldThrow<ResourceNotFoundProblem> {
      controller.updateOrganizationPaymentConfig(
        OrganizationPaymentConfigRead().organizationId(orgId).paymentStatus(OrganizationPaymentConfigRead.PaymentStatusEnum.MANUAL),
diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/PermissionApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/PermissionApiControllerTest.kt
new file mode 100644
index 00000000000..3df464b8f7b
--- /dev/null
+++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/PermissionApiControllerTest.kt
@@ -0,0 +1,99 @@
+/*
+ * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.PermissionCheckRead +import io.airbyte.api.model.generated.PermissionCheckRequest +import io.airbyte.api.model.generated.PermissionCreate +import io.airbyte.api.model.generated.PermissionIdRequestBody +import io.airbyte.api.model.generated.PermissionRead +import io.airbyte.api.model.generated.PermissionReadList +import io.airbyte.api.model.generated.PermissionUpdate +import io.airbyte.api.model.generated.PermissionsCheckMultipleWorkspacesRequest +import io.airbyte.api.model.generated.UserIdRequestBody +import io.airbyte.commons.server.handlers.PermissionHandler +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.util.UUID + +@MicronautTest +internal class PermissionApiControllerTest { + @Inject + lateinit var permissionHandler: PermissionHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(PermissionHandler::class) + fun permissionHandler(): PermissionHandler = mockk() + + @Test + fun testCreatePermission() { + every { permissionHandler.createPermission(any()) } returns PermissionRead() + + val path = "/api/v1/permissions/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, PermissionCreate().workspaceId(UUID.randomUUID())))) + } + + @Test + fun testGetPermission() { + every { permissionHandler.getPermission(any()) } returns PermissionRead() + + val path = "/api/v1/permissions/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, PermissionIdRequestBody()))) + } + + @Test + fun testUpdatePermission() { + val userId = UUID.randomUUID() + every { permissionHandler.getPermission(any()) } returns PermissionRead().userId(userId) + every { permissionHandler.updatePermission(any()) } returns Unit + + val path = "/api/v1/permissions/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, PermissionUpdate().permissionId(UUID.randomUUID())))) + } + + @Test + fun testDeletePermission() { + every { permissionHandler.deletePermission(any()) } returns Unit + + val path = "/api/v1/permissions/delete" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, PermissionIdRequestBody()))) + } + + @Test + fun testListPermissionByUser() { + every { permissionHandler.listPermissionsByUser(any()) } returns PermissionReadList() + + val path = "/api/v1/permissions/list_by_user" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, UserIdRequestBody()))) + } + + @Test + fun testCheckPermission() { + every { permissionHandler.checkPermissions(any()) } returns PermissionCheckRead() + + val path = "/api/v1/permissions/check" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, PermissionCheckRequest()))) + } + + @Test + fun testCheckMultipleWorkspacesPermission() { + every { permissionHandler.permissionsCheckMultipleWorkspaces(any()) } returns PermissionCheckRead() + + val path = "/api/v1/permissions/check_multiple_workspaces" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, PermissionsCheckMultipleWorkspacesRequest()))) + } +} diff --git 
a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SchedulerApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SchedulerApiControllerTest.kt new file mode 100644 index 00000000000..ad8d19f2d2c --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SchedulerApiControllerTest.kt @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.CheckConnectionRead +import io.airbyte.api.model.generated.DestinationCoreConfig +import io.airbyte.api.model.generated.SourceCoreConfig +import io.airbyte.api.model.generated.SourceDiscoverSchemaRead +import io.airbyte.commons.server.handlers.SchedulerHandler +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class SchedulerApiControllerTest { + @Inject + lateinit var schedulerHandler: SchedulerHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(SchedulerHandler::class) + fun schedulerHandler(): SchedulerHandler = mockk() + + @Test + fun testExecuteDestinationCheckConnection() { + every { schedulerHandler.checkDestinationConnectionFromDestinationCreate(any()) } returns CheckConnectionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/scheduler/destinations/check_connection" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, DestinationCoreConfig()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, DestinationCoreConfig()))) + } + + @Test + fun testExecuteSourceCheckConnection() { + every { schedulerHandler.checkSourceConnectionFromSourceCreate(any()) } returns CheckConnectionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/scheduler/sources/check_connection" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceCoreConfig()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceCoreConfig()))) + } + + @Test + fun testExecuteSourceDiscoverSchema() { + every { schedulerHandler.discoverSchemaForSourceFromSourceCreate(any()) } returns SourceDiscoverSchemaRead() + + val path = "/api/v1/scheduler/sources/discover_schema" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceCoreConfig()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceApiControllerTest.kt new file mode 100644 index 00000000000..7c14f6f92d5 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceApiControllerTest.kt @@ -0,0 +1,170 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved.
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.ActorCatalogWithUpdatedAt +import io.airbyte.api.model.generated.CheckConnectionRead +import io.airbyte.api.model.generated.DiscoverCatalogResult +import io.airbyte.api.model.generated.SourceCloneRequestBody +import io.airbyte.api.model.generated.SourceCreate +import io.airbyte.api.model.generated.SourceDiscoverSchemaRead +import io.airbyte.api.model.generated.SourceDiscoverSchemaRequestBody +import io.airbyte.api.model.generated.SourceDiscoverSchemaWriteRequestBody +import io.airbyte.api.model.generated.SourceIdRequestBody +import io.airbyte.api.model.generated.SourceRead +import io.airbyte.api.model.generated.SourceReadList +import io.airbyte.api.model.generated.SourceSearch +import io.airbyte.api.model.generated.SourceUpdate +import io.airbyte.api.model.generated.WorkspaceIdRequestBody +import io.airbyte.commons.server.handlers.SchedulerHandler +import io.airbyte.commons.server.handlers.SourceHandler +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class SourceApiControllerTest { + @Inject + lateinit var schedulerHandler: SchedulerHandler + + @Inject + lateinit var sourceHandler: SourceHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(SchedulerHandler::class) + fun schedulerHandler(): SchedulerHandler = mockk() + + @MockBean(SourceHandler::class) + fun sourceHandler(): SourceHandler = mockk() + + @Test + fun testCheckConnectionToSource() { + every { schedulerHandler.checkSourceConnectionFromSourceId(any()) } returns CheckConnectionRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/sources/check_connection" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testCheckConnectionToSourceForUpdate() { + every { schedulerHandler.checkSourceConnectionFromSourceIdForUpdate(any()) } returns CheckConnectionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/sources/check_connection_for_update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceUpdate()))) + } + + @Test + fun testCloneSource() { + every { sourceHandler.cloneSource(any()) } returns SourceRead() andThenThrows ConfigNotFoundException("", "") + val path = "/api/v1/sources/clone" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceCloneRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceCloneRequestBody()))) + } + + @Test + fun testCreateSource() { + every { sourceHandler.createSourceWithOptionalSecret(any()) } returns SourceRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/sources/create" + assertStatus(HttpStatus.OK, 
client.status(HttpRequest.POST(path, SourceCreate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceCreate()))) + } + + @Test + fun testDeleteSource() { + every { sourceHandler.deleteSource(any()) } returns Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/sources/delete" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testDiscoverSchemaForSource() { + every { schedulerHandler.discoverSchemaForSourceFromSourceId(any()) } returns SourceDiscoverSchemaRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/sources/discover_schema" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceDiscoverSchemaRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDiscoverSchemaRequestBody()))) + } + + @Test + fun testGetSource() { + every { sourceHandler.getSource(any()) } returns SourceRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/sources/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testGetMostRecentSourceActorCatalog() { + every { sourceHandler.getMostRecentSourceActorCatalogWithUpdatedAt(any()) } returns ActorCatalogWithUpdatedAt() + + val path = "/api/v1/sources/most_recent_source_actor_catalog" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testListSourcesForWorkspace() { + every { sourceHandler.listSourcesForWorkspace(any()) } returns SourceReadList() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/sources/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testSearchSources() { + every { sourceHandler.searchSources(any()) } returns SourceReadList() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/sources/search" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceSearch()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceSearch()))) + } + + @Test + fun testUpdateSources() { + every { sourceHandler.updateSource(any()) } returns SourceRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/sources/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceUpdate()))) + } + + @Test + fun testWriteDiscoverCatalogResult() { + every { sourceHandler.writeDiscoverCatalogResult(any()) } returns DiscoverCatalogResult() + + val path = "/api/v1/sources/write_discover_catalog_result" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceDiscoverSchemaWriteRequestBody()))) + } + + @Test + fun testUpgradeSourceVersion() { + every { sourceHandler.upgradeSourceVersion(any()) } returns Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/sources/upgrade_version" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, 
SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceIdRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceDefinitionApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceDefinitionApiControllerTest.kt new file mode 100644 index 00000000000..448a3597404 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceDefinitionApiControllerTest.kt @@ -0,0 +1,174 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.PrivateSourceDefinitionRead +import io.airbyte.api.model.generated.PrivateSourceDefinitionReadList +import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody +import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId +import io.airbyte.api.model.generated.SourceDefinitionRead +import io.airbyte.api.model.generated.SourceDefinitionReadList +import io.airbyte.api.model.generated.SourceDefinitionUpdate +import io.airbyte.api.model.generated.SourceIdRequestBody +import io.airbyte.api.model.generated.WorkspaceIdRequestBody +import io.airbyte.commons.server.errors.ApplicationErrorKnownException +import io.airbyte.commons.server.handlers.SourceDefinitionsHandler +import io.airbyte.commons.server.validation.ActorDefinitionAccessValidator +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.util.UUID + +@MicronautTest(rebuildContext = true) +internal class SourceDefinitionApiControllerTest { + @Inject + lateinit var sourceDefinitionsHandler: SourceDefinitionsHandler + + @Inject + lateinit var actorDefinitionAccessValidator: ActorDefinitionAccessValidator + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(SourceDefinitionsHandler::class) + fun sourceDefinitionsHandler(): SourceDefinitionsHandler = mockk() + + @MockBean(ActorDefinitionAccessValidator::class) + fun actorDefinitionAccessValidator(): ActorDefinitionAccessValidator = mockk() + + @Test + fun testCreateCustomSourceDefinition() { + every { sourceDefinitionsHandler.createCustomSourceDefinition(any()) } returns SourceDefinitionRead() + + val path = "/api/v1/source_definitions/create_custom" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testDeleteSourceDefinition() { + every { actorDefinitionAccessValidator.validateWriteAccess(any()) } returns Unit + every { sourceDefinitionsHandler.deleteSourceDefinition(any()) } returns Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/source_definitions/delete" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testDeleteSourceDefinitionNoWriteAccess() { + val 
sourceDefinitionId = UUID.randomUUID() + every { actorDefinitionAccessValidator.validateWriteAccess(sourceDefinitionId) } throws ApplicationErrorKnownException("invalid") + + val path = "/api/v1/source_definitions/delete" + assertStatus( + HttpStatus.UNPROCESSABLE_ENTITY, + client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody().sourceDefinitionId(sourceDefinitionId))), + ) + } + + @Test + fun testGetSourceDefinition() { + every { actorDefinitionAccessValidator.validateWriteAccess(any()) } returns Unit + every { sourceDefinitionsHandler.getSourceDefinition(any()) } returns SourceDefinitionRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/source_definitions/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testGetSourceDefinitionForWorkspace() { + every { sourceDefinitionsHandler.getSourceDefinitionForWorkspace(any()) } returns SourceDefinitionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/source_definitions/get_for_workspace" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceDefinitionIdWithWorkspaceId()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdWithWorkspaceId()))) + } + + @Test + fun testGrantSourceDefinitionToWorkspace() { + every { sourceDefinitionsHandler.grantSourceDefinitionToWorkspaceOrOrganization(any()) } returns PrivateSourceDefinitionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/source_definitions/grant_definition" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceDefinitionIdWithWorkspaceId()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdWithWorkspaceId()))) + } + + @Test + fun testListLatestSourceDefinitions() { + every { actorDefinitionAccessValidator.validateWriteAccess(any()) } returns Unit + every { sourceDefinitionsHandler.listLatestSourceDefinitions() } returns SourceDefinitionReadList() + + val path = "/api/v1/source_definitions/list_latest" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceDefinitionIdWithWorkspaceId()))) + } + + @Test + fun testListPrivateSourceDefinitions() { + every { sourceDefinitionsHandler.listPrivateSourceDefinitions(any()) } returns PrivateSourceDefinitionReadList() + + val path = "/api/v1/source_definitions/list_private" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testListSourceDefinitions() { + every { sourceDefinitionsHandler.listSourceDefinitions() } returns SourceDefinitionReadList() + + val path = "/api/v1/source_definitions/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ""))) + } + + @Test + fun testListSourceDefinitionsForWorkspace() { + every { sourceDefinitionsHandler.listSourceDefinitionsForWorkspace(any()) } returns SourceDefinitionReadList() + + val path = "/api/v1/source_definitions/list_for_workspace" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testRevokeSourceDefinition() { + every { sourceDefinitionsHandler.revokeSourceDefinition(any()) } returns Unit + + val path = "/api/v1/source_definitions/revoke_definition" + assertStatus(HttpStatus.NO_CONTENT, 
client.status(HttpRequest.POST(path, SourceDefinitionIdWithWorkspaceId()))) + } + + @Test + fun testUpdateSourceDefinition() { + every { actorDefinitionAccessValidator.validateWriteAccess(any()) } returns Unit + every { sourceDefinitionsHandler.updateSourceDefinition(any()) } returns SourceDefinitionRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/source_definitions/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceDefinitionUpdate()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionUpdate()))) + } + + @Test + fun testUpdateSourceDefinitionNoWriteAccess() { + val sourceDefinitionId = UUID.randomUUID() + every { actorDefinitionAccessValidator.validateWriteAccess(sourceDefinitionId) } throws ApplicationErrorKnownException("invalid") + + val path = "/api/v1/source_definitions/update" + assertStatus( + HttpStatus.UNPROCESSABLE_ENTITY, + client.statusException(HttpRequest.POST(path, SourceDefinitionUpdate().sourceDefinitionId(sourceDefinitionId))), + ) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceDefinitionSpecificationApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceDefinitionSpecificationApiControllerTest.kt new file mode 100644 index 00000000000..253b33fbb20 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceDefinitionSpecificationApiControllerTest.kt @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.SourceDefinitionIdWithWorkspaceId +import io.airbyte.api.model.generated.SourceDefinitionSpecificationRead +import io.airbyte.commons.server.handlers.ConnectorDefinitionSpecificationHandler +import io.airbyte.config.persistence.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class SourceDefinitionSpecificationApiControllerTest { + @Inject + lateinit var connectorDefinitionSpecificationHandler: ConnectorDefinitionSpecificationHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(ConnectorDefinitionSpecificationHandler::class) + fun connectorDefinitionSpecificationHandler(): ConnectorDefinitionSpecificationHandler = mockk() + + @Test + fun testCreateCustomSourceDefinition() { + every { + connectorDefinitionSpecificationHandler.getSourceDefinitionSpecification( + any(), + ) + } returns SourceDefinitionSpecificationRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/source_definition_specifications/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceDefinitionIdWithWorkspaceId()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdWithWorkspaceId()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceOauthApiControllerTest.kt 
b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceOauthApiControllerTest.kt new file mode 100644 index 00000000000..a7d767e766c --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/SourceOauthApiControllerTest.kt @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.CompleteOAuthResponse +import io.airbyte.api.model.generated.OAuthConsentRead +import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody +import io.airbyte.api.model.generated.SourceIdRequestBody +import io.airbyte.commons.server.handlers.OAuthHandler +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class SourceOauthApiControllerTest { + @Inject + lateinit var oAuthHandler: OAuthHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(OAuthHandler::class) + fun oAuthHandler(): OAuthHandler = mockk() + + @Test + fun testCompleteSourceOAuth() { + every { oAuthHandler.completeSourceOAuthHandleReturnSecret(any()) } returns CompleteOAuthResponse() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/source_oauths/complete_oauth" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testGetSourceOAuthConsent() { + every { oAuthHandler.getSourceOAuthConsent(any()) } returns OAuthConsentRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/source_oauths/get_consent_url" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testSetInstancewideSourceOauthParams() { + every { oAuthHandler.setSourceInstancewideOauthParams(any()) } returns Unit + + val path = "/api/v1/source_oauths/oauth_params/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/StateApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/StateApiControllerTest.kt new file mode 100644 index 00000000000..86e52f5cd4c --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/StateApiControllerTest.kt @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.ConnectionState +import io.airbyte.api.model.generated.ConnectionStateCreateOrUpdate +import io.airbyte.commons.server.handlers.StateHandler +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class StateApiControllerTest { + @Inject + lateinit var stateHandler: StateHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(StateHandler::class) + fun stateHandler(): StateHandler = mockk() + + @Test + fun testCreateOrUpdateState() { + every { stateHandler.createOrUpdateState(any()) } returns ConnectionState() + + val path = "/api/v1/state/create_or_update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionStateCreateOrUpdate()))) + } + + @Test + fun testGetState() { + every { stateHandler.getState(any()) } returns ConnectionState() + + val path = "/api/v1/state/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, ConnectionState()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/StreamStatusesApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/StreamStatusesApiControllerTest.kt new file mode 100644 index 00000000000..75ea122e896 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/StreamStatusesApiControllerTest.kt @@ -0,0 +1,210 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.ConnectionIdRequestBody +import io.airbyte.api.model.generated.Pagination +import io.airbyte.api.model.generated.StreamStatusCreateRequestBody +import io.airbyte.api.model.generated.StreamStatusIncompleteRunCause +import io.airbyte.api.model.generated.StreamStatusJobType +import io.airbyte.api.model.generated.StreamStatusListRequestBody +import io.airbyte.api.model.generated.StreamStatusRead +import io.airbyte.api.model.generated.StreamStatusReadList +import io.airbyte.api.model.generated.StreamStatusRunState +import io.airbyte.api.model.generated.StreamStatusUpdateRequestBody +import io.airbyte.server.assertStatus +import io.airbyte.server.handlers.StreamStatusesHandler +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.MethodSource +import java.util.UUID +import java.util.concurrent.ThreadLocalRandom +import java.util.stream.Stream +import kotlin.random.Random + +private const val PATH_BASE = "/api/v1/stream_statuses" +private const val PATH_CREATE = "$PATH_BASE/create" +private const val PATH_UPDATE = "$PATH_BASE/update" +private const val PATH_LIST = "$PATH_BASE/list" +private const val PATH_LATEST_PER_RUN_STATE = "$PATH_BASE/latest_per_run_state" + +@MicronautTest +internal class StreamStatusesApiControllerTest { + @Inject + lateinit var handler: StreamStatusesHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(StreamStatusesHandler::class) + fun streamStatusesHandler(): StreamStatusesHandler = mockk() + + @Test + fun testCreateSuccessful() { + every { handler.createStreamStatus(any()) } returns StreamStatusRead() + + assertStatus(HttpStatus.CREATED, client.status(HttpRequest.POST(PATH_CREATE, validCreate()))) + } + + @ParameterizedTest + @MethodSource("invalidRunStateCauseMatrix") + fun testCreateIncompleteRunCauseRunStateInvariant( + state: StreamStatusRunState?, + incompleteCause: StreamStatusIncompleteRunCause?, + ) { + every { handler.createStreamStatus(any()) } returns StreamStatusRead() + + val invalid = + validCreate() + .runState(state) + .incompleteRunCause(incompleteCause) + + assertStatus(HttpStatus.BAD_REQUEST, client.statusException(HttpRequest.POST(PATH_CREATE, invalid))) + } + + @Test + fun testUpdateSuccessful() { + every { handler.updateStreamStatus(any()) } returns StreamStatusRead() + + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(PATH_UPDATE, validUpdate()))) + } + + @ParameterizedTest + @MethodSource("invalidRunStateCauseMatrix") + fun testUpdateIncompleteRunCauseRunStateInvariant( + state: StreamStatusRunState?, + incompleteCause: StreamStatusIncompleteRunCause?, + ) { + every { handler.updateStreamStatus(any()) } returns StreamStatusRead() + + val invalid = + validUpdate() + .runState(state) + .incompleteRunCause(incompleteCause) + + assertStatus(HttpStatus.BAD_REQUEST, client.statusException(HttpRequest.POST(PATH_UPDATE, invalid))) + } + + @ParameterizedTest + 
@MethodSource("validPaginationMatrix") + fun testListSuccessful(pagination: Pagination?) { + every { handler.listStreamStatus(any()) } returns StreamStatusReadList() + + val valid = validList().pagination(pagination) + + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(PATH_LIST, valid))) + } + + @ParameterizedTest + @MethodSource("invalidListPaginationMatrix") + fun testListInvalidPagination(invalidPagination: Pagination?) { + every { handler.listStreamStatus(any()) } returns StreamStatusReadList() + + val invalid = validList().pagination(invalidPagination) + + assertStatus(HttpStatus.BAD_REQUEST, client.statusException(HttpRequest.POST(PATH_LIST, invalid))) + } + + @Test + fun testListPerRunStateSuccessful() { + val req = ConnectionIdRequestBody().connectionId(UUID.randomUUID()) + + every { handler.listStreamStatusPerRunState(req) } returns StreamStatusReadList() + + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(PATH_LATEST_PER_RUN_STATE, req))) + } + + companion object { + @JvmStatic + private fun invalidRunStateCauseMatrix(): Stream<Arguments> = + Stream.of( + Arguments.of(StreamStatusRunState.PENDING, StreamStatusIncompleteRunCause.FAILED), + Arguments.of(StreamStatusRunState.PENDING, StreamStatusIncompleteRunCause.CANCELED), + Arguments.of(StreamStatusRunState.RUNNING, StreamStatusIncompleteRunCause.FAILED), + Arguments.of(StreamStatusRunState.RUNNING, StreamStatusIncompleteRunCause.CANCELED), + Arguments.of(StreamStatusRunState.COMPLETE, StreamStatusIncompleteRunCause.FAILED), + Arguments.of(StreamStatusRunState.COMPLETE, StreamStatusIncompleteRunCause.CANCELED), + Arguments.of(StreamStatusRunState.INCOMPLETE, null), + ) + + @JvmStatic + private fun validPaginationMatrix(): Stream<Arguments> = + Stream.of( + Arguments.of(validPagination()), + Arguments.of(validPagination().rowOffset(30)), + Arguments.of(validPagination().pageSize(100).rowOffset(300)), + Arguments.of(validPagination().pageSize(5).rowOffset(10)), + ) + + @JvmStatic + private fun invalidListPaginationMatrix(): Stream<Arguments> = + Stream.of( + Arguments.of(null as Pagination?), + Arguments.of(validPagination().pageSize(0)), + Arguments.of(validPagination().pageSize(-1)), + Arguments.of(validPagination().rowOffset(-1)), + Arguments.of(validPagination().pageSize(-1).rowOffset(-1)), + Arguments.of(validPagination().pageSize(0).rowOffset(-1)), + Arguments.of(validPagination().pageSize(10).rowOffset(23)), + Arguments.of(validPagination().pageSize(20).rowOffset(10)), + Arguments.of(validPagination().pageSize(100).rowOffset(50)), + ) + } +} + +private val testNamespace = "test_" +private val testName = "table_1" +private val workspaceId = UUID.randomUUID() +private val connectionId = UUID.randomUUID() +private val jobId: Long = Random.nextLong() +private val transitionedAtMs: Long = System.currentTimeMillis() + +private fun validCreate(): StreamStatusCreateRequestBody = + StreamStatusCreateRequestBody() + .workspaceId(workspaceId) + .connectionId(connectionId) + .jobId(jobId) + .jobType(StreamStatusJobType.SYNC) + .attemptNumber(0) + .streamNamespace(testNamespace) + .streamName(testName) + .runState(StreamStatusRunState.PENDING) + .transitionedAt(transitionedAtMs) + +private fun validUpdate(): StreamStatusUpdateRequestBody = + StreamStatusUpdateRequestBody() + .workspaceId(workspaceId) + .connectionId(connectionId) + .jobId(jobId) + .jobType(StreamStatusJobType.SYNC) + .attemptNumber(0) + .streamNamespace(testNamespace) + .streamName(testName) + .runState(StreamStatusRunState.PENDING) + .transitionedAt(transitionedAtMs)
.id(UUID.randomUUID()) + +private fun validPagination(): Pagination = + Pagination() + .pageSize(10) + .rowOffset(0) + +fun validList(): StreamStatusListRequestBody = + StreamStatusListRequestBody() + .workspaceId(UUID.randomUUID()) + .jobId(ThreadLocalRandom.current().nextLong()) + .pagination(validPagination()) diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/UserApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/UserApiControllerTest.kt new file mode 100644 index 00000000000..a35a924f59e --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/UserApiControllerTest.kt @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.OrganizationIdRequestBody +import io.airbyte.api.model.generated.OrganizationUserReadList +import io.airbyte.api.model.generated.UserAuthIdRequestBody +import io.airbyte.api.model.generated.UserEmailRequestBody +import io.airbyte.api.model.generated.UserGetOrCreateByAuthIdResponse +import io.airbyte.api.model.generated.UserIdRequestBody +import io.airbyte.api.model.generated.UserRead +import io.airbyte.api.model.generated.UserUpdate +import io.airbyte.api.model.generated.UserWithPermissionInfoReadList +import io.airbyte.api.model.generated.WorkspaceIdRequestBody +import io.airbyte.api.model.generated.WorkspaceUserAccessInfoReadList +import io.airbyte.api.model.generated.WorkspaceUserReadList +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.server.handlers.UserHandler +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test + +@MicronautTest +internal class UserApiControllerTest { + @Inject + lateinit var userHandler: UserHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(UserHandler::class) + fun userHandler(): UserHandler = mockk() + + @Test + fun testGetUser() { + every { userHandler.getUser(any()) } returns UserRead() + + val path = "/api/v1/users/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, UserIdRequestBody()))) + } + + @Test + fun testGetUserByAuthId() { + every { userHandler.getUserByAuthId(any()) } returns UserRead() + + val path = "/api/v1/users/get_by_auth_id" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, UserAuthIdRequestBody()))) + } + + @Test + fun testGetUserByEmail() { + every { userHandler.getUserByEmail(any()) } returns UserRead() + + val path = "/api/v1/users/get_by_email" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, UserEmailRequestBody()))) + } + + @Test + fun testDeleteUser() { + every { userHandler.deleteUser(any()) } returns Unit + + val path = "/api/v1/users/delete" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, UserIdRequestBody()))) + } + + @Test + fun testUpdateUser() { + every { userHandler.updateUser(any()) } returns UserRead() + + val path = "/api/v1/users/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, UserUpdate()))) + } + + @Test + fun 
testListUsersInOrganization() { + every { userHandler.listUsersInOrganization(any()) } returns OrganizationUserReadList() + + val path = "/api/v1/users/list_by_organization_id" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, OrganizationIdRequestBody()))) + } + + @Test + fun testListUsersInWorkspace() { + every { userHandler.listUsersInWorkspace(any()) } returns WorkspaceUserReadList() + + val path = "/api/v1/users/list_by_workspace_id" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceIdRequestBody()))) + } + + @Test + fun testListInstanceAdminUsers() { + every { userHandler.listInstanceAdminUsers() } returns UserWithPermissionInfoReadList() + + val path = "/api/v1/users/list_instance_admin" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, Jsons.emptyObject()))) + } + + @Test + @Throws(Exception::class) + fun testGetOrCreateUser() { + every { userHandler.getOrCreateUserByAuthId(any()) } returns UserGetOrCreateByAuthIdResponse().userRead(UserRead()) + + val path = "/api/v1/users/get_or_create_by_auth_id" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, UserAuthIdRequestBody()))) + } + + @Test + fun testListAccessInfoByWorkspaceId() { + every { userHandler.listAccessInfoByWorkspaceId(any()) } returns WorkspaceUserAccessInfoReadList() + + val path = "/api/v1/users/list_access_info_by_workspace_id" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, Jsons.serialize(WorkspaceIdRequestBody())))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/WebBackendApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/WebBackendApiControllerTest.kt new file mode 100644 index 00000000000..677b0edf582 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/WebBackendApiControllerTest.kt @@ -0,0 +1,174 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.ConnectionStateType +import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody +import io.airbyte.api.model.generated.SourceIdRequestBody +import io.airbyte.api.model.generated.WebBackendCheckUpdatesRead +import io.airbyte.api.model.generated.WebBackendConnectionRead +import io.airbyte.api.model.generated.WebBackendConnectionReadList +import io.airbyte.api.model.generated.WebBackendConnectionRequestBody +import io.airbyte.api.model.generated.WebBackendGeographiesListResult +import io.airbyte.api.model.generated.WebBackendWorkspaceStateResult +import io.airbyte.api.problems.throwable.generated.ForbiddenProblem +import io.airbyte.commons.server.authorization.ApiAuthorizationHelper +import io.airbyte.commons.server.handlers.WebBackendCheckUpdatesHandler +import io.airbyte.commons.server.handlers.WebBackendConnectionsHandler +import io.airbyte.commons.server.handlers.WebBackendGeographiesHandler +import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.config.persistence.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.handlers.WebBackendCronExpressionHandler +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test +import java.util.UUID + +@MicronautTest(rebuildContext = true) +internal class WebBackendApiControllerTest { + @Inject + lateinit var apiAuthorizationHelper: ApiAuthorizationHelper + + @Inject + lateinit var webBackendConnectionsHandler: WebBackendConnectionsHandler + + @Inject + lateinit var webBackendCheckUpdatesHandler: WebBackendCheckUpdatesHandler + + @Inject + lateinit var webBackendGeographiesHandler: WebBackendGeographiesHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(ApiAuthorizationHelper::class) + fun apiAuthorizationHelper(): ApiAuthorizationHelper = mockk(relaxed = true) + + @MockBean(WebBackendConnectionsHandler::class) + fun webBackendConnectionsHandler(): WebBackendConnectionsHandler = mockk() + + @MockBean(WebBackendCheckUpdatesHandler::class) + fun webBackendCheckUpdatesHandler(): WebBackendCheckUpdatesHandler = mockk() + + @MockBean(WebBackendGeographiesHandler::class) + fun webBackendGeographiesHandler(): WebBackendGeographiesHandler = mockk() + + @MockBean(WebBackendCronExpressionHandler::class) + fun webBackendCronExpressionHandler(): WebBackendCronExpressionHandler = mockk() + + @MockBean(CurrentUserService::class) + fun currentUserService(): CurrentUserService = mockk(relaxed = true) {} + + @Test + fun testGetStateType() { + every { webBackendConnectionsHandler.getStateType(any()) } returns ConnectionStateType.STREAM + + val path = "/api/v1/web_backend/state/get_type" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testWebBackendCheckUpdates() { + every { webBackendCheckUpdatesHandler.checkUpdates() } returns WebBackendCheckUpdatesRead() + val path = "/api/v1/web_backend/check_updates" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, 
SourceIdRequestBody()))) + } + + @Test + fun testWebBackendCreateConnection() { + every { webBackendConnectionsHandler.webBackendCreateConnection(any()) } returns WebBackendConnectionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/web_backend/connections/create" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + @Disabled("fails for unknown reasons") + fun testWebBackendGetConnection() { + // first two calls succeed, third call will fail + every { + webBackendConnectionsHandler.webBackendGetConnection(any()) + } returns WebBackendConnectionRead() andThen WebBackendConnectionRead() andThenThrows ConfigNotFoundException("", "") + + // This only impacts calls where withRefreshCatalog(true) is present + // first two calls succeed, third call will fail + every { + apiAuthorizationHelper.checkWorkspacesPermissions(any(), any(), any(), any()) + } returns Unit andThen Unit andThenThrows ForbiddenProblem() + + val path = "/api/v1/web_backend/connections/get" + + // first call doesn't activate checkWorkspacePermissions because withRefreshedCatalog is false + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WebBackendConnectionRequestBody()))) + +// // second call activates checkWorkspacePermissions because withRefreshedCatalog is true, and passes +// // the check + assertStatus( + HttpStatus.OK, + client.status(HttpRequest.POST(path, WebBackendConnectionRequestBody().connectionId(UUID.randomUUID()).withRefreshedCatalog(true))), + ) +// +// // third call activates checkWorkspacePermissions because withRefreshedCatalog is true, passes it, +// // but then fails on the 404 + assertStatus( + HttpStatus.NOT_FOUND, + client.statusException(HttpRequest.POST(path, WebBackendConnectionRequestBody().connectionId(UUID.randomUUID()).withRefreshedCatalog(true))), + ) +// +// // fourth call activates checkWorkspacePermissions because withRefreshedCatalog is true, but fails +// // the check, so 403s + assertStatus( + HttpStatus.FORBIDDEN, + client.statusException(HttpRequest.POST(path, WebBackendConnectionRequestBody().connectionId(UUID.randomUUID()).withRefreshedCatalog(true))), + ) + } + + @Test + fun testWebBackendGetWorkspaceState() { + every { webBackendConnectionsHandler.getWorkspaceState(any()) } returns WebBackendWorkspaceStateResult() + + val path = "/api/v1/web_backend/workspace/state" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testWebBackendListConnectionsForWorkspace() { + every { webBackendConnectionsHandler.webBackendListConnectionsForWorkspace(any()) } returns WebBackendConnectionReadList() + + val path = "/api/v1/web_backend/connections/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testWebBackendListGeographies() { + every { webBackendGeographiesHandler.listGeographiesOSS() } returns WebBackendGeographiesListResult() + + val path = "/api/v1/web_backend/geographies/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testWebBackendUpdateConnection() { + every { webBackendConnectionsHandler.webBackendUpdateConnection(any()) } returns WebBackendConnectionRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/web_backend/connections/update" + 
assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/WorkspaceApiControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/WorkspaceApiControllerTest.kt new file mode 100644 index 00000000000..8573e5da751 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/WorkspaceApiControllerTest.kt @@ -0,0 +1,172 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.PermissionCheckRead +import io.airbyte.api.model.generated.SourceDefinitionIdRequestBody +import io.airbyte.api.model.generated.SourceIdRequestBody +import io.airbyte.api.model.generated.WorkspaceCreate +import io.airbyte.api.model.generated.WorkspaceCreateWithId +import io.airbyte.api.model.generated.WorkspaceRead +import io.airbyte.api.model.generated.WorkspaceReadList +import io.airbyte.api.model.generated.WorkspaceUpdateOrganization +import io.airbyte.commons.server.handlers.PermissionHandler +import io.airbyte.commons.server.handlers.WorkspacesHandler +import io.airbyte.commons.server.support.CurrentUserService +import io.airbyte.config.AuthenticatedUser +import io.airbyte.data.exceptions.ConfigNotFoundException +import io.airbyte.server.assertStatus +import io.airbyte.server.status +import io.airbyte.server.statusException +import io.micronaut.http.HttpRequest +import io.micronaut.http.HttpStatus +import io.micronaut.http.client.HttpClient +import io.micronaut.http.client.annotation.Client +import io.micronaut.test.annotation.MockBean +import io.micronaut.test.extensions.junit5.annotation.MicronautTest +import io.mockk.every +import io.mockk.mockk +import jakarta.inject.Inject +import org.junit.jupiter.api.Test +import java.util.UUID + +@MicronautTest(rebuildContext = true) +internal class WorkspaceApiControllerTest { + @Inject + lateinit var permissionHandler: PermissionHandler + + @Inject + lateinit var currentUserService: CurrentUserService + + @Inject + lateinit var workspacesHandler: WorkspacesHandler + + @Inject + @Client("/") + lateinit var client: HttpClient + + @MockBean(PermissionHandler::class) + fun permissionHandler(): PermissionHandler = mockk() + + @MockBean(CurrentUserService::class) + fun currentUserService(): CurrentUserService = mockk() + + @MockBean(WorkspacesHandler::class) + fun workspacesHandler(): WorkspacesHandler = mockk() + + @Test + fun testCreateWorkspace() { + every { permissionHandler.checkPermissions(any()) } returns PermissionCheckRead().status(PermissionCheckRead.StatusEnum.SUCCEEDED) andThen + PermissionCheckRead().status(PermissionCheckRead.StatusEnum.FAILED) + every { workspacesHandler.createWorkspace(any()) } returns WorkspaceRead() + every { currentUserService.getCurrentUser() } returns AuthenticatedUser() + + val path = "/api/v1/workspaces/create" + + // no org id, expect 200 + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + // org id present, permission check succeeds, expect 200 + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceCreate().organizationId(UUID.randomUUID())))) + // org id present, permission check fails, expect 403 + assertStatus(HttpStatus.FORBIDDEN, client.statusException(HttpRequest.POST(path, 
WorkspaceCreate().organizationId(UUID.randomUUID())))) + } + + @Test + fun testCreateWorkspaceIfNotExist() { + every { permissionHandler.checkPermissions(any()) } returns PermissionCheckRead().status(PermissionCheckRead.StatusEnum.SUCCEEDED) andThen + PermissionCheckRead().status(PermissionCheckRead.StatusEnum.FAILED) + every { workspacesHandler.createWorkspaceIfNotExist(any()) } returns WorkspaceRead() + every { currentUserService.getCurrentUser() } returns AuthenticatedUser() + + val path = "/api/v1/workspaces/create_if_not_exist" + + // no org id, expect 200 + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceCreateWithId()))) + // org id present, permission check succeeds, expect 200 + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceCreateWithId().organizationId(UUID.randomUUID())))) + // org id present, permission check fails, expect 403 + assertStatus(HttpStatus.FORBIDDEN, client.statusException(HttpRequest.POST(path, WorkspaceCreateWithId().organizationId(UUID.randomUUID())))) + } + + @Test + fun testDeleteWorkspace() { + every { workspacesHandler.deleteWorkspace(any()) } returns Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/workspaces/delete" + assertStatus(HttpStatus.NO_CONTENT, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testGetWorkspace() { + every { workspacesHandler.getWorkspace(any()) } returns WorkspaceRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/workspaces/get" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testGetBySlugWorkspace() { + every { workspacesHandler.getWorkspaceBySlug(any()) } returns WorkspaceRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/workspaces/get_by_slug" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testListWorkspace() { + every { workspacesHandler.listWorkspaces() } returns WorkspaceReadList() + + val path = "/api/v1/workspaces/list" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + } + + @Test + fun testUpdateWorkspace() { + every { workspacesHandler.updateWorkspace(any()) } returns WorkspaceRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/workspaces/update" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testUpdateWorkspaceOrganization() { + every { workspacesHandler.updateWorkspaceOrganization(any()) } returns WorkspaceRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/workspaces/update_organization" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, WorkspaceUpdateOrganization()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, WorkspaceUpdateOrganization()))) + } + + @Test + fun testUpdateWorkspaceFeedback() { + every { workspacesHandler.setFeedbackDone(any()) } returns 
Unit andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/workspaces/tag_feedback_status_as_done" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testUpdateWorkspaceName() { + every { workspacesHandler.updateWorkspaceName(any()) } returns WorkspaceRead() andThenThrows ConfigNotFoundException("", "") + + val path = "/api/v1/workspaces/update_name" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceDefinitionIdRequestBody()))) + } + + @Test + fun testGetWorkspaceByConnectionId() { + every { workspacesHandler.getWorkspaceByConnectionId(any(), any()) } returns WorkspaceRead() andThenThrows + ConfigNotFoundException("", "") + + val path = "/api/v1/workspaces/get_by_connection_id" + assertStatus(HttpStatus.OK, client.status(HttpRequest.POST(path, SourceIdRequestBody()))) + assertStatus(HttpStatus.NOT_FOUND, client.statusException(HttpRequest.POST(path, SourceIdRequestBody()))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/services/JobServiceTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/services/JobServiceTest.kt index db981881506..fe54f28f90b 100644 --- a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/services/JobServiceTest.kt +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/publicapi/services/JobServiceTest.kt @@ -22,22 +22,18 @@ class JobServiceTest { private val connectionId = UUID.randomUUID() - private val schedulerHandler = mockk() + @Inject + lateinit var schedulerHandler: SchedulerHandler @MockBean(SchedulerHandler::class) - fun schedulerHandler(): SchedulerHandler { - return schedulerHandler - } + fun schedulerHandler(): SchedulerHandler = mockk() @MockBean(ApplicationService::class) - fun applicationService(): ApplicationService { - return mockk() - } + fun applicationService(): ApplicationService = mockk() @Test fun `test sync already running value conflict known exception`() { val failureReason = "A sync is already running for: $connectionId" - val schedulerHandler = schedulerHandler() every { schedulerHandler.syncConnection(any()) } throws ValueConflictKnownException(failureReason) @@ -47,7 +43,6 @@ class JobServiceTest { @Test fun `test sync already running illegal state exception`() { val failureReason = "A sync is already running for: $connectionId" - val schedulerHandler = schedulerHandler() every { schedulerHandler.syncConnection(any()) } throws IllegalStateException(failureReason) diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/handlers/WebBackendCronExpressionHandlerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/handlers/WebBackendCronExpressionHandlerTest.kt index 74bd3b4ccc7..722e66026b6 100644 --- a/airbyte-server/src/test/kotlin/io/airbyte/server/handlers/WebBackendCronExpressionHandlerTest.kt +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/handlers/WebBackendCronExpressionHandlerTest.kt @@ -2,6 +2,7 @@ package io.airbyte.server.handlers import io.airbyte.api.model.generated.WebBackendDescribeCronExpressionRequestBody import io.airbyte.api.problems.throwable.generated.CronValidationInvalidExpressionProblem +import io.airbyte.commons.server.helpers.CronExpressionHelper import junit.framework.TestCase.assertEquals import 
junit.framework.TestCase.assertNotNull import org.junit.jupiter.api.Test @@ -12,7 +13,8 @@ const val CRON_EVERY_MINUTE = "0 * * * * ?" const val Y2K = "0 0 0 1 1 ? 2000" class WebBackendCronExpressionHandlerTest { - private var webBackendCronExpressionHandler: WebBackendCronExpressionHandler = WebBackendCronExpressionHandler() + private val cronExpressionHelper: CronExpressionHelper = CronExpressionHelper() + private val webBackendCronExpressionHandler: WebBackendCronExpressionHandler = WebBackendCronExpressionHandler(cronExpressionHelper) @Test fun testDescribeEveryHourCronExpression() { diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/handlers/WebBackendMappersHandlerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/handlers/WebBackendMappersHandlerTest.kt new file mode 100644 index 00000000000..cd46c494cab --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/handlers/WebBackendMappersHandlerTest.kt @@ -0,0 +1,167 @@ +package io.airbyte.server.handlers + +import io.airbyte.api.model.generated.AirbyteCatalog +import io.airbyte.api.model.generated.ConfiguredStreamMapper +import io.airbyte.api.model.generated.ConnectionRead +import io.airbyte.api.model.generated.FieldSpec +import io.airbyte.api.model.generated.MapperValidationErrorType +import io.airbyte.api.model.generated.StreamDescriptor +import io.airbyte.api.model.generated.WebBackendValidateMappersRequestBody +import io.airbyte.commons.json.Jsons +import io.airbyte.commons.server.handlers.ConnectionsHandler +import io.airbyte.commons.server.handlers.helpers.CatalogConverter +import io.airbyte.config.AirbyteStream +import io.airbyte.config.ConfiguredAirbyteCatalog +import io.airbyte.config.ConfiguredAirbyteStream +import io.airbyte.config.DestinationSyncMode +import io.airbyte.config.Field +import io.airbyte.config.FieldType +import io.airbyte.config.MapperConfig +import io.airbyte.config.SyncMode +import io.airbyte.mappers.transformations.DestinationCatalogGenerator +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +class WebBackendMappersHandlerTest { + companion object { + private const val STREAM_NAME = "stream" + private const val STREAM_NAMESPACE = "namespace" + private const val USERNAME_FIELD = "username" + private const val PASSWORD_FIELD = "password" + private const val PASSWORD_FIELD_HASHED = "password_hashed" + } + + private val connectionsHandler = mockk<ConnectionsHandler>() + private val catalogConverter = mockk<CatalogConverter>() + private val destinationCatalogGenerator = mockk<DestinationCatalogGenerator>() + + private val webBackendMappersHandler = WebBackendMappersHandler(connectionsHandler, catalogConverter, destinationCatalogGenerator) + + @Test + fun testValidateMappers() { + val connectionId = UUID.randomUUID() + + val apiCatalog = mockk<AirbyteCatalog>() + every { connectionsHandler.getConnection(any()) } returns ConnectionRead().syncCatalog(apiCatalog) + + val ogFields = buildFields(USERNAME_FIELD, PASSWORD_FIELD) + val configuredStream = buildStream(STREAM_NAME, ogFields) + val configuredCatalog = + ConfiguredAirbyteCatalog( + listOf( + buildStream("otherStream"), + configuredStream, + buildStream("yetAnotherStream"), + ), + ) + every { catalogConverter.toConfiguredInternal(apiCatalog) } returns configuredCatalog + + val apiMappers = listOf<ConfiguredStreamMapper>(mockk(), mockk()) + val allMappers = listOf<MapperConfig>(mockk(relaxed = true), mockk(relaxed = true)) + every { catalogConverter.toConfiguredMappers(apiMappers) } returns allMappers + + // First
mapper hashes the password field + val streamWithFirstMapper = configuredStream.copy(mappers = listOf(allMappers[0])) + val outputFields = buildFields(USERNAME_FIELD, PASSWORD_FIELD_HASHED) + every { + destinationCatalogGenerator.generateDestinationCatalog(buildCatalog(streamWithFirstMapper)) + } returns + DestinationCatalogGenerator.CatalogGenerationResult( + buildCatalog(streamWithFirstMapper, outputFields), + mapOf(), + ) + + // second mapper has a validation error + val streamWithBothMappers = configuredStream.copy(mappers = allMappers) + every { + destinationCatalogGenerator.generateDestinationCatalog(buildCatalog(streamWithBothMappers)) + } returns + DestinationCatalogGenerator.CatalogGenerationResult( + buildCatalog(streamWithBothMappers, outputFields), + mapOf( + io.airbyte.config.StreamDescriptor().withName(STREAM_NAME).withNamespace(STREAM_NAMESPACE) to + mapOf( + allMappers[1] to + DestinationCatalogGenerator.MapperError( + DestinationCatalogGenerator.MapperErrorType.FIELD_NOT_FOUND, + "Field not found", + ), + ), + ), + ) + + val req = + WebBackendValidateMappersRequestBody() + .connectionId(connectionId) + .streamDescriptor(StreamDescriptor().name(STREAM_NAME).namespace(STREAM_NAMESPACE)) + .mappers(apiMappers) + + val res = webBackendMappersHandler.validateMappers(req) + + assertEquals( + listOf( + FieldSpec().name(USERNAME_FIELD).type(FieldSpec.TypeEnum.STRING), + FieldSpec().name(PASSWORD_FIELD).type(FieldSpec.TypeEnum.STRING), + ), + res.initialFields, + ) + + assertEquals(2, res.mappers.size) + assertEquals( + listOf( + FieldSpec().name(USERNAME_FIELD).type(FieldSpec.TypeEnum.STRING), + FieldSpec().name(PASSWORD_FIELD_HASHED).type(FieldSpec.TypeEnum.STRING), + ), + res.mappers[0].outputFields, + ) + assertEquals( + listOf( + FieldSpec().name(USERNAME_FIELD).type(FieldSpec.TypeEnum.STRING), + FieldSpec().name(PASSWORD_FIELD_HASHED).type(FieldSpec.TypeEnum.STRING), + ), + res.mappers[1].outputFields, + ) + + assertEquals(MapperValidationErrorType.FIELD_NOT_FOUND, res.mappers[1].validationError.type) + assertEquals("Field not found", res.mappers[1].validationError.message) + + verify { + catalogConverter.toConfiguredInternal(apiCatalog) + catalogConverter.toConfiguredMappers(apiMappers) + } + + verify(exactly = 2) { + destinationCatalogGenerator.generateDestinationCatalog(any()) + } + } + + private fun buildFields(vararg fields: String): List { + return fields.map { Field(it, FieldType.STRING) } + } + + private fun buildStream( + name: String, + fields: List? = null, + ): ConfiguredAirbyteStream { + return ConfiguredAirbyteStream.Builder() + .stream( + AirbyteStream(name, Jsons.emptyObject(), listOf()) + .withNamespace(STREAM_NAMESPACE), + ) + .syncMode(SyncMode.FULL_REFRESH) + .destinationSyncMode(DestinationSyncMode.OVERWRITE) + .fields(fields) + .build() + } + + private fun buildCatalog( + stream: ConfiguredAirbyteStream, + fields: List? 
= null, + ): ConfiguredAirbyteCatalog { + return ConfiguredAirbyteCatalog(listOf(stream.copy(fields = fields ?: stream.fields))) + } +} diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/services/DataplaneServiceTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/services/DataplaneServiceTest.kt new file mode 100644 index 00000000000..1df5bdac7d9 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/services/DataplaneServiceTest.kt @@ -0,0 +1,155 @@ +package io.airbyte.server.services + +import io.airbyte.api.model.generated.ActorType +import io.airbyte.api.model.generated.WorkloadPriority +import io.airbyte.config.DestinationConnection +import io.airbyte.config.Geography +import io.airbyte.config.ScopedConfiguration +import io.airbyte.config.SourceConnection +import io.airbyte.config.StandardSync +import io.airbyte.data.services.ConnectionService +import io.airbyte.data.services.DestinationService +import io.airbyte.data.services.ScopedConfigurationService +import io.airbyte.data.services.SourceService +import io.airbyte.data.services.WorkspaceService +import io.airbyte.featureflag.CloudProvider +import io.airbyte.featureflag.CloudProviderRegion +import io.airbyte.featureflag.Connection +import io.airbyte.featureflag.FeatureFlagClient +import io.airbyte.featureflag.GeographicRegion +import io.airbyte.featureflag.Multi +import io.airbyte.featureflag.Priority +import io.airbyte.featureflag.Priority.Companion.HIGH_PRIORITY +import io.airbyte.featureflag.WorkloadApiRouting +import io.airbyte.featureflag.Workspace +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.util.UUID + +class DataplaneServiceTest { + private lateinit var connectionService: ConnectionService + private lateinit var workspaceService: WorkspaceService + private lateinit var sourceService: SourceService + private lateinit var destinationService: DestinationService + private lateinit var featureFlagClient: FeatureFlagClient + private lateinit var scopedConfigurationService: ScopedConfigurationService + + private val connectionId = UUID.randomUUID() + private val sourceId = UUID.randomUUID() + private val destinationId = UUID.randomUUID() + private val workspaceId = UUID.randomUUID() + + @BeforeEach + fun setup() { + // Setup all fallbacks/base cases + connectionService = mockk() + every { connectionService.getStandardSync(connectionId) } returns StandardSync().withGeography(Geography.EU).withDestinationId((destinationId)) + workspaceService = mockk() + every { workspaceService.getGeographyForWorkspace(workspaceId) } returns Geography.US + sourceService = mockk() + every { sourceService.getSourceConnection(sourceId) } returns SourceConnection().withWorkspaceId(workspaceId) + destinationService = mockk() + every { destinationService.getDestinationConnection(destinationId) } returns DestinationConnection().withWorkspaceId(workspaceId) + featureFlagClient = mockk() + every { featureFlagClient.stringVariation(any(), any()) } returns "auto" + scopedConfigurationService = mockk() + every { scopedConfigurationService.getScopedConfigurations(any(), any()) } returns listOf() + } + + @Test + fun testGetQueueNameWithConnection() { + val workloadPriority: WorkloadPriority = WorkloadPriority.HIGH + val dataplaneService = + DataplaneService(connectionService, workspaceService, sourceService, destinationService, featureFlagClient, scopedConfigurationService) + dataplaneService.getQueueName(connectionId, 
null, null, null, workloadPriority) + + verify(exactly = 1) { connectionService.getStandardSync(connectionId) } + verify(exactly = 1) { destinationService.getDestinationConnection(destinationId) } + verify(exactly = 0) { workspaceService.getGeographyForWorkspace(workspaceId) } + + val expectedContext = + listOf( + io.airbyte.featureflag.Geography(Geography.EU.toString()), + Workspace(workspaceId), + Priority(HIGH_PRIORITY), + Connection(connectionId.toString()), + ) + verify(exactly = 1) { featureFlagClient.stringVariation(WorkloadApiRouting, Multi(expectedContext)) } + } + + @Test + fun testGetQueueNameWithSource() { + val workloadPriority: WorkloadPriority = WorkloadPriority.HIGH + val dataplaneService = + DataplaneService(connectionService, workspaceService, sourceService, destinationService, featureFlagClient, scopedConfigurationService) + + dataplaneService.getQueueName(null, ActorType.SOURCE, sourceId, null, workloadPriority) + verify(exactly = 1) { sourceService.getSourceConnection(sourceId) } + verify(exactly = 1) { workspaceService.getGeographyForWorkspace(workspaceId) } + + val expectedContext = listOf(io.airbyte.featureflag.Geography(Geography.US.toString()), Workspace(workspaceId), Priority(HIGH_PRIORITY)) + verify(exactly = 1) { featureFlagClient.stringVariation(WorkloadApiRouting, Multi(expectedContext)) } + } + + @Test + fun testGetQueueNameWithDestination() { + val workloadPriority: WorkloadPriority = WorkloadPriority.DEFAULT + val dataplaneService = + DataplaneService(connectionService, workspaceService, sourceService, destinationService, featureFlagClient, scopedConfigurationService) + + dataplaneService.getQueueName(null, ActorType.DESTINATION, destinationId, null, workloadPriority) + verify(exactly = 1) { destinationService.getDestinationConnection(destinationId) } + verify(exactly = 1) { workspaceService.getGeographyForWorkspace(workspaceId) } + + val expectedContext = listOf(io.airbyte.featureflag.Geography(Geography.US.toString()), Workspace(workspaceId)) + verify(exactly = 1) { featureFlagClient.stringVariation(WorkloadApiRouting, Multi(expectedContext)) } + } + + @Test + fun testGetQueueNameWithScopedConfig() { + val workloadPriority: WorkloadPriority = WorkloadPriority.DEFAULT + val localScopedConfigurationService: ScopedConfigurationService = mockk() + every { localScopedConfigurationService.getScopedConfigurations(any(), any()) } returns listOf(ScopedConfiguration()) + val dataplaneService = + DataplaneService(connectionService, workspaceService, sourceService, destinationService, featureFlagClient, localScopedConfigurationService) + + dataplaneService.getQueueName(null, ActorType.DESTINATION, destinationId, null, workloadPriority) + verify(exactly = 1) { destinationService.getDestinationConnection(destinationId) } + verify(exactly = 1) { workspaceService.getGeographyForWorkspace(workspaceId) } + + val expectedContext = + listOf( + CloudProvider(CloudProvider.AWS), + GeographicRegion(GeographicRegion.US), + Workspace(workspaceId), + CloudProviderRegion(CloudProviderRegion.AWS_US_EAST_1), + ) + verify(exactly = 1) { featureFlagClient.stringVariation(WorkloadApiRouting, Multi(expectedContext)) } + } + + @Test + fun testGetQueueNameWithScopedConfigAndConnectionId() { + val workloadPriority: WorkloadPriority = WorkloadPriority.DEFAULT + val localScopedConfigurationService: ScopedConfigurationService = mockk() + every { localScopedConfigurationService.getScopedConfigurations(any(), any()) } returns listOf(ScopedConfiguration()) + val dataplaneService = + 
DataplaneService(connectionService, workspaceService, sourceService, destinationService, featureFlagClient, localScopedConfigurationService) + + dataplaneService.getQueueName(connectionId, null, null, null, workloadPriority) + verify(exactly = 1) { connectionService.getStandardSync(connectionId) } + verify(exactly = 0) { workspaceService.getGeographyForWorkspace(workspaceId) } + + val expectedWithConnection = + listOf( + CloudProvider(CloudProvider.AWS), + GeographicRegion(GeographicRegion.EU), + Workspace(workspaceId), + CloudProviderRegion(CloudProviderRegion.AWS_US_EAST_1), + Connection(connectionId), + ) + verify(exactly = 1) { featureFlagClient.stringVariation(WorkloadApiRouting, Multi(expectedWithConnection)) } + } +} diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java index 235914511bc..c5add612b07 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/AcceptanceTestsResources.java @@ -7,6 +7,7 @@ import static io.airbyte.config.persistence.OrganizationPersistence.DEFAULT_ORGANIZATION_ID; import static io.airbyte.test.utils.AcceptanceTestUtils.createAirbyteApiClient; import static io.airbyte.test.utils.AcceptanceTestUtils.modifyCatalog; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -29,6 +30,7 @@ import io.airbyte.api.client.model.generated.JobInfoRead; import io.airbyte.api.client.model.generated.JobRead; import io.airbyte.api.client.model.generated.JobStatus; +import io.airbyte.api.client.model.generated.LogFormatType; import io.airbyte.api.client.model.generated.SourceDefinitionIdRequestBody; import io.airbyte.api.client.model.generated.SourceDefinitionRead; import io.airbyte.api.client.model.generated.SourceDiscoverSchemaRead; @@ -141,7 +143,7 @@ void runIncrementalSyncForAWorkspaceId(final UUID workspaceId) throws Exception final AirbyteCatalog retrievedCatalog = discoverResult.getCatalog(); final AirbyteStream stream = retrievedCatalog.getStreams().get(0).getStream(); - Assertions.assertEquals(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), stream.getSupportedSyncModes()); + assertEquals(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), stream.getSupportedSyncModes()); Assertions.assertFalse(stream.getSourceDefinedCursor()); assertTrue(stream.getDefaultCursorField().isEmpty()); assertTrue(stream.getSourceDefinedPrimaryKey().isEmpty()); @@ -254,7 +256,7 @@ SyncIds runSmallSyncForAWorkspaceId(final UUID workspaceId) throws Exception { final AirbyteCatalog retrievedCatalog = discoverResult.getCatalog(); final AirbyteStream stream = retrievedCatalog.getStreams().get(0).getStream(); - Assertions.assertEquals(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), stream.getSupportedSyncModes()); + assertEquals(Lists.newArrayList(SyncMode.FULL_REFRESH, SyncMode.INCREMENTAL), stream.getSupportedSyncModes()); Assertions.assertFalse(stream.getSourceDefinedCursor()); assertTrue(stream.getDefaultCursorField().isEmpty()); assertTrue(stream.getSourceDefinedPrimaryKey().isEmpty()); @@ -301,7 +303,10 @@ SyncIds runSmallSyncForAWorkspaceId(final UUID workspaceId) throws Exception { final var attemptId = 
    connectionSyncRead1.getAttempts().size() - 1;
    final var attempt = testHarness.getApiClient().getAttemptApi().getAttemptForJob(
        new GetAttemptStatsRequestBody(jobId, attemptId));
-    assertFalse(attempt.getLogs().getLogLines().isEmpty());
+    // Structured logs should exist
+    assertEquals(LogFormatType.STRUCTURED, attempt.getLogType());
+    assertFalse(attempt.getLogs().getEvents().isEmpty());
+    assertTrue(attempt.getLogs().getLogLines().isEmpty());
     return new SyncIds(connectionId, jobId, attemptId);
   }
diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java
index 610635d0dcd..4acf7283345 100644
--- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java
+++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SchemaManagementTests.java
@@ -10,7 +10,6 @@
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
 import static org.junit.jupiter.api.Assertions.assertNotEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
 import io.airbyte.api.client.model.generated.AirbyteCatalog;
 import io.airbyte.api.client.model.generated.AirbyteStream;
@@ -159,8 +158,8 @@ void afterEach() {
   /**
    * Verify that if we call web_backend/connections/get with some connection id and
-   * refreshSchema=true, then: - We'll detect schema changes for the given connection. - We'll detect
-   * schema changes for any connections sharing the same source.
+   * refreshSchema=true, then: - We'll detect schema changes for the given connection. - We do not
+   * evaluate schema changes for other connections.
    */
   @Test
   void detectBreakingSchemaChangeViaWebBackendGetConnection() throws Exception {
@@ -174,9 +173,8 @@ void detectBreakingSchemaChangeViaWebBackendGetConnection() throws Exception {
     assertEquals(ConnectionStatus.INACTIVE, currentConnection.getStatus());
     final ConnectionRead currentConnectionWithSameSource = testHarness.getConnection(createdConnectionWithSameSource.getConnectionId());
-    assertTrue(currentConnectionWithSameSource.getBreakingChange());
-    assertEquals(createdConnectionWithSameSource.getSyncCatalog(), currentConnectionWithSameSource.getSyncCatalog());
-    assertEquals(ConnectionStatus.INACTIVE, currentConnectionWithSameSource.getStatus());
+    assertFalse(currentConnectionWithSameSource.getBreakingChange());
+    assertEquals(ConnectionStatus.ACTIVE, currentConnectionWithSameSource.getStatus());
   }
   @Test
diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java
index 0b59d0a2334..54def201369 100644
--- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java
+++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/SyncAcceptanceTests.java
@@ -226,7 +226,7 @@ void testCronSync() throws Exception {
    // NOTE: this cron should run once every two minutes.
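(Editorial aside, not part of the diff: Quartz cron expressions start with a seconds field, so the old value "* */2 * * * ?" matched every second of every even minute and fired far more often than the comment above intends. The new value "0 */2 * * * ?" fires exactly once, at second 0, every two minutes.)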
final ConnectionScheduleData connectionScheduleData = new ConnectionScheduleData(null, - new ConnectionScheduleDataCron("* */2 * * * ?", "UTC")); + new ConnectionScheduleDataCron("0 */2 * * * ?", "UTC")); final SyncMode srcSyncMode = SyncMode.FULL_REFRESH; final DestinationSyncMode dstSyncMode = DestinationSyncMode.OVERWRITE; final AirbyteCatalog catalog = modifyCatalog( diff --git a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadEnterpriseAcceptanceTests.java b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadEnterpriseAcceptanceTests.java index 6e412389cb2..c0eeb244a61 100644 --- a/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadEnterpriseAcceptanceTests.java +++ b/airbyte-tests/src/test-acceptance/java/io/airbyte/test/acceptance/WorkloadEnterpriseAcceptanceTests.java @@ -61,7 +61,7 @@ final boolean ranWithWorkload(final UUID connectionId, final long jobId, final i final var attempt = testResources.getTestHarness().getApiClient().getAttemptApi().getAttemptForJob( new GetAttemptStatsRequestBody(jobId, attemptNumber)); final String creatingWorkloadLog = "Starting workload heartbeat"; - return attempt.getLogs().getLogLines().stream().anyMatch(l -> l.contains(creatingWorkloadLog)); + return attempt.getLogs().getEvents().stream().anyMatch(l -> l.getMessage().contains(creatingWorkloadLog)); } @Test diff --git a/airbyte-webapp/Dockerfile b/airbyte-webapp/Dockerfile index 885beeb99e4..eafb2b2e189 100644 --- a/airbyte-webapp/Dockerfile +++ b/airbyte-webapp/Dockerfile @@ -1,4 +1,4 @@ -ARG NGINX_IMAGE=nginxinc/nginx-unprivileged:alpine3.20 +ARG NGINX_IMAGE=airbyte/nginx-unprivileged:alpine3.20-2 FROM ${NGINX_IMAGE} ARG BUILD_DIR=bin/build @@ -6,15 +6,13 @@ ARG NGINX_CONFIG=bin/nginx/default.conf.template EXPOSE 8080 -USER root +COPY --chown=nginx:nginx ${BUILD_DIR} /usr/share/nginx/html +COPY --chown=nginx:nginx ${NGINX_CONFIG} /etc/nginx/templates/default.conf.template -COPY ${BUILD_DIR} /usr/share/nginx/html +USER root RUN < { - after(() => { - cy.logout(); - }); - - it("loads without error", () => { - cy.login(); - cy.selectWorkspace(); - cy.hasNavigatedTo("/connections"); - cy.contains("Billing").click({ force: true }); - cy.hasNavigatedTo("/billing"); - cy.contains(/(Buy|Remaining) credits/); - }); -}); diff --git a/airbyte-webapp/cypress/commands/connectorBuilder.ts b/airbyte-webapp/cypress/commands/connectorBuilder.ts index da9de64dd02..642bc2d5333 100644 --- a/airbyte-webapp/cypress/commands/connectorBuilder.ts +++ b/airbyte-webapp/cypress/commands/connectorBuilder.ts @@ -65,7 +65,7 @@ export const configureParameterizedRequests = (numberOfParameters: number) => { goToView("0"); enableParameterizedRequests(); configureParameters(Array.from(Array(numberOfParameters).keys()).join(","), "item_id"); - enterUrlPath("items/{{}{{} stream_slice.item_id }}"); + enterUrlPath("items/{{}{{} stream_slice.item_id"); }; export const publishProject = () => { @@ -170,10 +170,15 @@ export const assertSchemaMismatch = () => { }; export const assertUrlPath = (urlPath: string) => { - getUrlPathInput().should("have.attr", "value", urlPath); + getUrlPathInput().contains(urlPath); }; export const acceptSchema = () => { openDetectedSchemaTab(); cy.get("[data-testid='accept-schema']").click(); }; + +export const focusAndType = (selector: string, text: string) => { + cy.get(selector).click(); + cy.get(selector).type(text); +}; diff --git a/airbyte-webapp/cypress/e2e/connectorBuilder.cy.ts b/airbyte-webapp/cypress/e2e/connectorBuilder.cy.ts 
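(Editorial aside, not part of the diff: the focusAndType helper added to cypress/commands/connectorBuilder.ts above clicks the element to focus it before typing, which is less flaky than a bare cy.type() on Builder inputs. A minimal usage sketch; the selector is the one used in connectorBuilderPage.ts, the typed URL is illustrative only:)

import { focusAndType } from "@cy/commands/connectorBuilder";

const urlBaseInput = "[name='formValues.global.urlBase']";

it("types the url base", () => {
  // Click to focus first, then type, instead of typing into an unfocused field.
  focusAndType(urlBaseInput, "https://api.example.com/v1");
});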
index 1e655e013d6..33ee3878bb5 100644 --- a/airbyte-webapp/cypress/e2e/connectorBuilder.cy.ts +++ b/airbyte-webapp/cypress/e2e/connectorBuilder.cy.ts @@ -78,10 +78,7 @@ describe("Connector builder", { testIsolation: false, tags: "@builder" }, () => it("Read - With pagination", () => { configurePagination(); - enterUrlPath("items/"); - testStream(); - assertMultiPageReadItems(); }); diff --git a/airbyte-webapp/cypress/pages/connectorBuilderPage.ts b/airbyte-webapp/cypress/pages/connectorBuilderPage.ts index 5f7c8b380c7..143571c06b3 100644 --- a/airbyte-webapp/cypress/pages/connectorBuilderPage.ts +++ b/airbyte-webapp/cypress/pages/connectorBuilderPage.ts @@ -1,16 +1,18 @@ import { selectFromDropdown } from "@cy/commands/common"; +import { focusAndType } from "@cy/commands/connectorBuilder"; const startFromScratchButton = "[data-testid='start-from-scratch']"; const nameLabel = "[data-testid='connector-name-label']"; const nameInput = "[data-testid='connector-name-input']"; -const urlBaseInput = "input[name='formValues.global.urlBase']"; +const urlBaseInput = "[name='formValues.global.urlBase']"; const addStreamButton = "[data-testid='add-stream']"; const apiKeyInput = "input[name='connectionConfiguration.api_key']"; const togglePaginationInput = "[data-testid='toggle-formValues.streams.0.paginator']"; const toggleParameterizedRequestsInput = "input[data-testid='toggle-formValues.streams.0.parameterizedRequests']"; +const parameterizedRequestsCursorInput = "[name='formValues.streams.0.parameterizedRequests.0.cursor_field']"; const streamNameInput = "input[name='streamName']"; -const streamUrlPathFromModal = "input[name='urlPath']"; -const streamUrlPathFromForm = "input[name='formValues.streams.0.urlPath']"; +const streamUrlPathFromModal = "[name='urlPath']"; +const streamUrlPathFromForm = "[name='formValues.streams.0.urlPath']"; const recordSelectorToggle = "[data-testid='toggle-formValues.streams.0.recordSelector']"; const recordSelectorFieldPathInput = "[data-testid='tag-input-formValues.streams.0.recordSelector.fieldPath'] input"; const authType = "[data-testid='formValues.global.authenticator.type']"; @@ -38,7 +40,7 @@ export const editProjectBuilder = (name: string) => { }; export const startFromScratch = () => { - cy.get(startFromScratchButton).click(); + cy.get(startFromScratchButton, { timeout: 20000 }).click(); }; export const enterName = (name: string) => { @@ -48,7 +50,7 @@ export const enterName = (name: string) => { }; export const enterUrlBase = (urlBase: string) => { - cy.get(urlBaseInput).type(urlBase); + focusAndType(urlBaseInput, urlBase); }; export const enterRecordSelector = (recordSelector: string) => { @@ -67,7 +69,7 @@ export const selectActiveVersion = (name: string, version: number) => { }; export const goToView = (view: string) => { - cy.get(`button[data-testid=navbutton-${view}]`).click(); + cy.get(`button[data-testid=navbutton-${view}]`, { timeout: 20000 }).click(); }; export const openTestInputs = () => { @@ -96,9 +98,9 @@ export const configureLimitOffsetPagination = ( ) => { cy.get(limitInput).type(limit); selectFromDropdown(injectLimitInto, limitInto); - cy.get(injectLimitFieldName).type(limitFieldName); + focusAndType(injectLimitFieldName, limitFieldName); selectFromDropdown(injectOffsetInto, offsetInto); - cy.get(injectOffsetFieldName).type(offsetFieldName); + focusAndType(injectOffsetFieldName, offsetFieldName); }; export const enableParameterizedRequests = () => { @@ -110,7 +112,7 @@ export const configureParameters = (values: string, cursor_field: string) 
=> { cy.get('[data-testid="tag-input-formValues.streams.0.parameterizedRequests.0.values.value"] input[type="text"]').type( values ); - cy.get("[name='formValues.streams.0.parameterizedRequests.0.cursor_field']").type(cursor_field); + focusAndType(parameterizedRequestsCursorInput, cursor_field); }; export const getSlicesFromDropdown = () => { @@ -139,7 +141,7 @@ export const enterStreamName = (streamName: string) => { }; export const enterUrlPathFromForm = (urlPath: string) => { - cy.get(streamUrlPathFromModal).type(urlPath); + focusAndType(streamUrlPathFromModal, urlPath); }; export const getUrlPathInput = () => { @@ -147,9 +149,8 @@ export const getUrlPathInput = () => { }; export const enterUrlPath = (urlPath: string) => { - cy.get('[name="formValues.streams.0.urlPath"]').focus(); - cy.get('[name="formValues.streams.0.urlPath"]').clear(); - cy.get('[name="formValues.streams.0.urlPath"]').type(urlPath); + focusAndType(streamUrlPathFromForm, "{selectAll}{backspace}"); + cy.get(streamUrlPathFromForm).type(urlPath); }; export const submitForm = () => { diff --git a/airbyte-webapp/package.json b/airbyte-webapp/package.json index 148d8173892..54a45e5a914 100644 --- a/airbyte-webapp/package.json +++ b/airbyte-webapp/package.json @@ -33,7 +33,7 @@ "validate-lock": "node ./scripts/validate-lock-files.js", "preanalyze-lowcode": "TS_NODE_TRANSPILE_ONLY=true pnpm run generate-client", "analyze-lowcode": "ts-node --skip-project ./scripts/analyze-low-code-manifests.ts", - "cypress:open": "CYPRESS_AIRBYTE_SERVER_BASE_URL=${CYPRESS_AIRBYTE_SERVER_BASE_URL:-http://localhost:8001} cypress open --config-file cypress/cypress.config.ts", + "cypress:open": "CYPRESS_AIRBYTE_SERVER_BASE_URL=${CYPRESS_AIRBYTE_SERVER_BASE_URL:-https://local.airbyte.dev} cypress open --config-file cypress/cypress.config.ts", "precypress:run": "TS_NODE_TRANSPILE_ONLY=true pnpm run generate-client", "cypress:run": "CYPRESS_BASE_URL=${CYPRESS_BASE_URL:-http://localhost:8000} CYPRESS_AIRBYTE_SERVER_BASE_URL=${CYPRESS_AIRBYTE_SERVER_BASE_URL:-http://localhost:8001} cypress run --config-file cypress/cypress.config.ts", "createdbsource": "docker run --rm -d -p 5433:5432 -e POSTGRES_PASSWORD=secret_password -e POSTGRES_DB=airbyte_ci_source --name airbyte_ci_pg_source postgres", diff --git a/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx b/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx index 6441fa4fde8..7b8cebb9c90 100644 --- a/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx +++ b/airbyte-webapp/src/area/connection/components/AttemptDetails/AttemptDetails.tsx @@ -4,7 +4,8 @@ import { FormattedDate, FormattedMessage, useIntl } from "react-intl"; import { FlexContainer } from "components/ui/Flex"; import { Text } from "components/ui/Text"; -import { AttemptRead, AttemptStats, AttemptStatus, FailureReason, FailureType } from "core/api/types/AirbyteClient"; +import { useAttemptCombinedStatsForJob } from "core/api"; +import { AttemptRead, AttemptStatus, FailureReason, FailureType } from "core/api/types/AirbyteClient"; import { formatBytes } from "core/utils/numberHelper"; import { useFormatLengthOfTime } from "core/utils/time"; @@ -19,11 +20,10 @@ interface AttemptDetailsProps { className?: string; attempt: AttemptRead; hasMultipleAttempts?: boolean; - jobId: string; + jobId: number; isPartialSuccess?: boolean; showEndedAt?: boolean; showFailureMessage?: boolean; - aggregatedAttemptStats?: AttemptStats; } export const AttemptDetails: React.FC = 
({ @@ -33,8 +33,8 @@ export const AttemptDetails: React.FC = ({ isPartialSuccess, showEndedAt = false, showFailureMessage = true, - aggregatedAttemptStats, }) => { + const { data: aggregatedAttemptStats } = useAttemptCombinedStatsForJob(jobId, attempt); const { formatMessage } = useIntl(); const attemptRunTime = useFormatLengthOfTime((attempt.updatedAt - attempt.createdAt) * 1000); diff --git a/airbyte-webapp/src/area/connection/components/HistoricalOverview/ChartConfig.tsx b/airbyte-webapp/src/area/connection/components/HistoricalOverview/ChartConfig.tsx index c70f0e63635..7f08d87d666 100644 --- a/airbyte-webapp/src/area/connection/components/HistoricalOverview/ChartConfig.tsx +++ b/airbyte-webapp/src/area/connection/components/HistoricalOverview/ChartConfig.tsx @@ -129,8 +129,7 @@ export const ClickToJob = (chartState: CategoricalChartState & { height: number openJobLogsModal({ openModal, jobId, - connectionId: connection.connectionId, - connectionName: connection.name, + connection, }); return ( diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.module.scss b/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.module.scss index 9713c8ea7d8..0220fb2abce 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.module.scss +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.module.scss @@ -31,6 +31,55 @@ &__lineLogContent { position: relative; + flex-grow: 1; + } + + &__timestamp { + align-self: flex-start; + } + + &__logSource { + align-self: flex-start; + + &--source { + background-color: colors.$ansi-blue-bg; + } + + &--destination { + background-color: colors.$ansi-yellow-bg; + } + + &--replicationOrchestrator { + background-color: colors.$ansi-cyan-bg; + } + + &--platform { + background-color: colors.$ansi-cyan-bg; + } + } + + &__level { + align-self: flex-start; + + &--warn { + color: colors.$ansi-yellow-fg; + } + + &--error { + color: colors.$ansi-yellow-fg; + } + + &--info { + color: colors.$ansi-cyan-fg; + } + + &--debug { + color: colors.$ansi-cyan-fg; + } + + &--trace { + color: colors.$ansi-cyan-fg; + } } &__searchMatch { diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.test.ts b/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.test.ts index bc237dfbe36..f0a4f175c41 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.test.ts +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.test.ts @@ -1,4 +1,4 @@ -import { getSearchMatchesInLine, sanitizeHtml } from "./VirtualLogs"; +import { sanitizeHtml } from "./VirtualLogs"; describe(`${sanitizeHtml.name}`, () => { it("should return a normal logLine as it is", () => { @@ -11,29 +11,3 @@ describe(`${sanitizeHtml.name}`, () => { ); }); }); - -describe(`${getSearchMatchesInLine.name}`, () => { - it("should return empty array if no match", () => { - expect(getSearchMatchesInLine("no match", "zzz")).toEqual([]); - }); - - it("should return an array with one match if there is a single match", () => { - expect(getSearchMatchesInLine("a b c d", "a")).toEqual([{ precedingNewlines: 0, characterOffsetLeft: 0 }]); - }); - - it("should return an array with multiple matches if there are multiple matches", () => { - expect(getSearchMatchesInLine("a b c d a a", "a")).toEqual([ - { precedingNewlines: 0, characterOffsetLeft: 0 }, - { precedingNewlines: 0, characterOffsetLeft: 8 }, - { precedingNewlines: 0, 
characterOffsetLeft: 10 }, - ]); - }); - - it("should calculate the correct preceding newlines", () => { - expect(getSearchMatchesInLine("a b c\na b a", "a")).toEqual([ - { precedingNewlines: 0, characterOffsetLeft: 0 }, - { precedingNewlines: 1, characterOffsetLeft: 0 }, - { precedingNewlines: 1, characterOffsetLeft: 4 }, - ]); - }); -}); diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.tsx b/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.tsx index cf54e5368c9..9f8ed28cc80 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.tsx +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/VirtualLogs.tsx @@ -1,11 +1,11 @@ import Anser from "anser"; import classNames from "classnames"; import React, { HTMLAttributes, useEffect, useRef } from "react"; +import Highlighter from "react-highlight-words"; import { FormattedMessage } from "react-intl"; import { Virtuoso, ItemContent, VirtuosoHandle } from "react-virtuoso"; import sanitize from "sanitize-html"; -import { FlexContainer } from "components/ui/Flex"; import { Text } from "components/ui/Text"; import { CleanedLogLines } from "./useCleanLogs"; @@ -15,9 +15,9 @@ interface VirtualLogsProps { logLines: CleanedLogLines; searchTerm?: string; scrollTo?: number; - selectedAttempt?: number; + attemptId: number; hasFailure: boolean; - attemptHasStructuredLogs: boolean; + showStructuredLogs: boolean; } function escapeRegex(string: string) { @@ -45,6 +45,7 @@ export const sanitizeHtml = (logLine: string) => { interface RowContext { searchTerm?: string; highlightedRowIndex?: number; + showStructuredLogs: boolean; } const LogLine: React.FC> = (props) => ( @@ -56,9 +57,9 @@ const VirtualLogsUnmemoized: React.FC = ({ logLines, searchTerm, scrollTo, - selectedAttempt, + attemptId, hasFailure, - attemptHasStructuredLogs, + showStructuredLogs, }) => { const listRef = useRef(null); const highlightedRowIndex = scrollTo; @@ -77,15 +78,6 @@ const VirtualLogsUnmemoized: React.FC = ({ )} - {/** - * Structured logs are currently not supported in the UI - * https://github.com/airbytehq/airbyte-internal-issues/issues/10476 - */} - {attemptHasStructuredLogs && ( - - Structured logs are currently not supported in the UI. Download the logs to view them. - - )} {logLines && ( ref={listRef} @@ -96,11 +88,11 @@ const VirtualLogsUnmemoized: React.FC = ({ // scroll, which results in not positioning at the bottom (isAtBottom) => isAtBottom && (hasFailure ? 
true : "smooth") } - key={selectedAttempt} + key={attemptId} style={{ width: "100%", height: "100%" }} data={logLines} itemContent={Row} - context={{ searchTerm, highlightedRowIndex }} + context={{ searchTerm, highlightedRowIndex, showStructuredLogs }} atBottomThreshold={50 /* covers edge case(s) where Virtuoso doesn't scroll all the way to the bottom */} increaseViewportBy={150} components={{ @@ -162,10 +154,31 @@ export const getSearchMatchesInLine = (text: string, searchTerm?: string) => { return matchIndices; }; +const HighlightedSearchMatches = React.memo(({ text, searchTerm }: { text: string; searchTerm?: string }) => { + const searchMatchesInLine = getSearchMatchesInLine(expandTabs(text), searchTerm); + + return ( + <> + {searchMatchesInLine.map((match, index) => ( + + {searchTerm} + + ))} + + ); +}); +HighlightedSearchMatches.displayName = "HighlightedSearchMatches"; + const Row: ItemContent = (index, item, context) => { const rowIsHighlighted = context.highlightedRowIndex === index; const html = Anser.ansiToHtml(expandTabs(item.original), { use_classes: true }); - const searchMatchesInLine = getSearchMatchesInLine(expandTabs(item.text), context.searchTerm); return (
= (index, item, cont >
{item.lineNumber}
- {searchMatchesInLine.length > 0 && - searchMatchesInLine.map(({ characterOffsetLeft: characterIndex, precedingNewlines }, matchIndex) => ( -
+ + + - ))} - + + + + + + )} + {!context.showStructuredLogs && ( + <> + + + + )}
); }; +// JSX makes this hard. When copy and pasting, we want a space between the elements. +const SpaceForCopy = () => <> ; + // The length of the logLines is a fine proxy to tell if they have changed, which can avoid re-renders export const VirtualLogs = React.memo( VirtualLogsUnmemoized, @@ -200,6 +245,6 @@ export const VirtualLogs = React.memo( prevProps.logLines.length === nextProps.logLines.length && prevProps.searchTerm === nextProps.searchTerm && prevProps.scrollTo === nextProps.scrollTo && - prevProps.selectedAttempt === nextProps.selectedAttempt && - prevProps.attemptHasStructuredLogs === nextProps.attemptHasStructuredLogs + prevProps.attemptId === nextProps.attemptId && + prevProps.showStructuredLogs === nextProps.showStructuredLogs ); diff --git a/airbyte-webapp/src/area/connection/components/JobHistoryItem/useCleanLogs.tsx b/airbyte-webapp/src/area/connection/components/JobHistoryItem/useCleanLogs.tsx index a5a0a067f6d..cfa71fcfda5 100644 --- a/airbyte-webapp/src/area/connection/components/JobHistoryItem/useCleanLogs.tsx +++ b/airbyte-webapp/src/area/connection/components/JobHistoryItem/useCleanLogs.tsx @@ -1,11 +1,13 @@ import Anser from "anser"; +import dayjs from "dayjs"; import { useMemo } from "react"; -import { attemptHasFormattedLogs } from "core/api"; -import { AttemptInfoRead } from "core/api/types/AirbyteClient"; +import { attemptHasFormattedLogs, attemptHasStructuredLogs } from "core/api"; +import { AttemptInfoRead, LogEvent, LogLevel, LogSource } from "core/api/types/AirbyteClient"; export interface CleanedLogs { - origins: JobLogOrigins[]; + sources: LogSource[]; + levels: LogLevel[]; logLines: CleanedLogLines; } @@ -13,67 +15,87 @@ export type CleanedLogLines = Array<{ lineNumber: number; original: string; text: string; - domain?: JobLogOrigins; + level?: LogLevel; + source?: LogSource; + timestamp?: string; }>; -export enum JobLogOrigins { - Destination = "destination", - Platform = "platform", - Other = "other", - ReplicationOrchestrator = "replication-orchestrator", - Source = "source", -} - -export const KNOWN_LOG_ORIGINS = [ +// This can be removed once we switch over entirely to structured logs +// https://github.com/airbytehq/airbyte-internal-issues/issues/10658 +export const LOG_SOURCE_REGEX_MAP = [ { - key: JobLogOrigins.ReplicationOrchestrator, + key: LogSource["replication-orchestrator"], regex: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} replication-orchestrator/, }, - { key: JobLogOrigins.Source, regex: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} source/ }, - { key: JobLogOrigins.Destination, regex: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} destination/ }, - { key: JobLogOrigins.Platform, regex: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} platform/ }, - // If the log starts with a timestamp but then doesn't match any of the above, it's considered not matching any domain - // which is helpful to start a new block of log lines that does not have any color-coding - { key: JobLogOrigins.Other, regex: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} / }, + { key: LogSource.source, regex: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} source/ }, + { key: LogSource.destination, regex: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} destination/ }, + { key: LogSource.platform, regex: /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} platform/ }, ]; +export const LOG_LEVELS: LogLevel[] = ["info", "warn", "error", "debug", "trace"]; + /** * useCleanLogs iterates through each log line of each attempt and transforms it to be more easily consumed by the UI. 
*/ export const useCleanLogs = (attempt: AttemptInfoRead): CleanedLogs => { return useMemo(() => { - const origins: JobLogOrigins[] = []; - // Some logs are multi-line, so we want to associate those lines (which might not have the correct prefix) with the last domain that was detected - let lastDomain: JobLogOrigins | undefined; + const levels = new Set(); + const sources = new Set(); if (attemptHasFormattedLogs(attempt)) { const logLines = attempt.logs.logLines.map((line, index) => { const text = Anser.ansiToText(line); - const domain = KNOWN_LOG_ORIGINS.find((domain) => domain.regex.test(text))?.key; - if (domain) { - lastDomain = domain; - if (!origins.includes(domain)) { - origins.push(domain); - } + const source = LOG_SOURCE_REGEX_MAP.find((source) => source.regex.test(text))?.key; + if (source) { + sources.add(source); } return { lineNumber: index + 1, original: line, text, - domain: domain ?? lastDomain, + source, }; }); return { - origins, + sources: [...sources], + levels: [...levels], logLines, }; } - // Structured logs are currently not supported in the UI: - // https://github.com/airbytehq/airbyte-internal-issues/issues/10476 - return { - origins, - logLines: [], - }; + if (attemptHasStructuredLogs(attempt)) { + const logLines = attempt.logs.events.map((event, index) => { + levels.add(event.level); + const messageWithoutLogLevel = event.message.replace(beginsWithLogLevel, ""); + return { + lineNumber: index + 1, + original: formatLogEvent({ ...event, message: messageWithoutLogLevel }), + text: messageWithoutLogLevel, + source: event.logSource, + level: event.level, + timestamp: formatLogEventTimestamp(event.timestamp), + }; + }); + return { + sources: [...sources], + levels: [...levels], + logLines, + }; + } + + throw new Error("Log format unsupported. Only formatted or structured logs are supported."); }, [attempt]); }; + +// Filters out the log level from the beginning of the log message, because connector logs hard-code this as part of +// the log message. Structured logs from the platform (using logback) do not have this issue. 
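(Editorial aside, not part of the diff: the comment above refers to the beginsWithLogLevel regex defined just below. A small self-contained sketch of what it does to a connector log message; the sample messages are illustrative:)

const LOG_LEVELS = ["info", "warn", "error", "debug", "trace"];
const beginsWithLogLevel = new RegExp(`^(${LOG_LEVELS.map((level) => level.toUpperCase()).join("|")})\\s*`);

// Connector messages repeat the level in the text; strip it so it is not rendered twice.
console.log("INFO Starting workload heartbeat".replace(beginsWithLogLevel, "")); // "Starting workload heartbeat"
// Platform (logback) events carry no such prefix and pass through unchanged.
console.log("Starting workload heartbeat".replace(beginsWithLogLevel, "")); // "Starting workload heartbeat"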
+const beginsWithLogLevel = new RegExp(`^(${LOG_LEVELS.map((level) => level.toUpperCase()).join("|")})\\s*`); + +export function formatLogEvent(event: LogEvent): string { + return `${formatLogEventTimestamp(event.timestamp)} ${event.level} ${event.message}`; +} + +function formatLogEventTimestamp(unixTimestamp: number): string { + // Intentionally not internationalized to match expectations for log timestamps + return dayjs(unixTimestamp).format("YYYY-MM-DD HH:mm:ss"); +} diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/AttemptLogs.tsx b/airbyte-webapp/src/area/connection/components/JobLogsModal/AttemptLogs.tsx new file mode 100644 index 00000000000..2ae20b26682 --- /dev/null +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/AttemptLogs.tsx @@ -0,0 +1,250 @@ +import { useCallback, useEffect, useMemo, useRef, useState } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; +import { useDebounce } from "react-use"; + +import { Box } from "components/ui/Box"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { MultiListBox } from "components/ui/ListBox/MultiListBox"; +import { Switch } from "components/ui/Switch"; +import { Text } from "components/ui/Text"; + +import { LogSearchInput } from "area/connection/components/JobHistoryItem/LogSearchInput"; +import { LOG_LEVELS, LOG_SOURCE_REGEX_MAP, useCleanLogs } from "area/connection/components/JobHistoryItem/useCleanLogs"; +import { VirtualLogs } from "area/connection/components/JobHistoryItem/VirtualLogs"; +import { attemptHasStructuredLogs, AttemptInfoReadWithLogs } from "core/api"; +import { LogLevel, LogSource } from "core/api/types/AirbyteClient"; + +import { JobLogsModalFailureMessage } from "./JobLogsModalFailureMessage"; + +interface AttemptLogsProps { + attempt: AttemptInfoReadWithLogs; +} + +export const AttemptLogs: React.FC = ({ attempt }) => { + const searchInputRef = useRef(null); + + const [inputValue, setInputValue] = useState(""); + const [highlightedMatchIndex, setHighlightedMatchIndex] = useState(undefined); + const [matchingLines, setMatchingLines] = useState([]); + const highlightedMatchingLineNumber = highlightedMatchIndex !== undefined ? highlightedMatchIndex + 1 : undefined; + + const showStructuredLogs = attempt && attemptHasStructuredLogs(attempt); + + const { logLines, sources, levels } = useCleanLogs(attempt); + const [selectedLogLevels, setSelectedLogLevels] = useState(LOG_LEVELS); + const [selectedLogSources, setSelectedLogSources] = useState(LOG_SOURCE_REGEX_MAP.map(({ key }) => key)); + const firstMatchIndex = 0; + const lastMatchIndex = matchingLines.length - 1; + const [debouncedSearchTerm, setDebouncedSearchTerm] = useState(""); + const scrollTo = useMemo( + () => (matchingLines && highlightedMatchIndex !== undefined ? 
matchingLines[highlightedMatchIndex] : undefined), + [matchingLines, highlightedMatchIndex] + ); + const { formatMessage } = useIntl(); + + const logLevelOptions = useMemo>( + () => + LOG_LEVELS.map((level) => { + return { label: formatMessage({ id: `jobHistory.logs.logLevel.${level}` }), value: level }; + }), + [formatMessage] + ); + + const logSourceOptions = useMemo>( + () => + LOG_SOURCE_REGEX_MAP.map(({ key }) => { + return { label: formatMessage({ id: `jobHistory.logs.logSource.${key}` }), value: key }; + }), + [formatMessage] + ); + + const onSelectLogSource = useCallback( + (source: LogSource) => { + if (!selectedLogSources) { + setSelectedLogSources(sources.filter((s) => s !== source)); + } else { + setSelectedLogSources( + selectedLogSources.includes(source) + ? selectedLogSources.filter((s) => s !== source) + : [...selectedLogSources, source] + ); + } + }, + [sources, selectedLogSources] + ); + + const filteredLogLines = useMemo(() => { + return logLines.filter((line) => { + if (line.source && !selectedLogSources?.includes(line.source)) { + return false; + } + if (line.level && !selectedLogLevels?.includes(line.level)) { + return false; + } + return true; + }); + }, [logLines, selectedLogSources, selectedLogLevels]); + + // Debounces changes to the search input so we don't recompute the matching lines on every keystroke + useDebounce( + () => { + setDebouncedSearchTerm(inputValue); + setHighlightedMatchIndex(undefined); + const searchTermLowerCase = inputValue.toLowerCase(); + if (inputValue.length > 0) { + const matchingLines: number[] = []; + filteredLogLines.forEach((line, index) => { + return line.original.toLocaleLowerCase().includes(searchTermLowerCase) && matchingLines.push(index); + }); + setMatchingLines(matchingLines); + if (matchingLines.length > 0) { + setHighlightedMatchIndex(firstMatchIndex); + } else { + setHighlightedMatchIndex(undefined); + } + } else { + setMatchingLines([]); + setHighlightedMatchIndex(undefined); + } + }, + 150, + [inputValue, filteredLogLines] + ); + + const onSearchTermChange = (searchTerm: string) => { + setInputValue(searchTerm); + }; + + const onSearchInputKeydown = (e: React.KeyboardEvent) => { + if (e.shiftKey && e.key === "Enter") { + e.preventDefault(); + scrollToPreviousMatch(); + } else if (e.key === "Enter") { + e.preventDefault(); + scrollToNextMatch(); + } + }; + + const scrollToPreviousMatch = () => { + if (matchingLines.length === 0) { + return; + } + if (highlightedMatchIndex === undefined) { + setHighlightedMatchIndex(lastMatchIndex); + } else { + setHighlightedMatchIndex(highlightedMatchIndex === firstMatchIndex ? lastMatchIndex : highlightedMatchIndex - 1); + } + searchInputRef.current?.focus(); + }; + + const scrollToNextMatch = () => { + if (matchingLines.length === 0) { + return; + } + if (highlightedMatchIndex === undefined) { + setHighlightedMatchIndex(firstMatchIndex); + } else { + setHighlightedMatchIndex(highlightedMatchIndex === lastMatchIndex ? firstMatchIndex : highlightedMatchIndex + 1); + } + searchInputRef.current?.focus(); + }; + + // Focus the search input with cmd + f / ctrl + f + // Clear search input on `esc`, if search input is focused + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if (e.key === "f" && (navigator.platform.toLowerCase().includes("mac") ? 
e.metaKey : e.ctrlKey)) { + e.preventDefault(); + searchInputRef.current?.focus(); + } else if (e.key === "Escape" && document.activeElement === searchInputRef.current) { + if (inputValue.length > 0) { + e.preventDefault(); + setInputValue(""); + } + } + }; + document.body.addEventListener("keydown", handleKeyDown); + return () => document.body.removeEventListener("keydown", handleKeyDown); + }, [inputValue]); + + return ( + <> + + + + + + + {showStructuredLogs && ( + <> + + setSelectedLogSources(newSources ?? sources)} + label="Log sources" + /> + + + setSelectedLogLevels(newLevels ?? levels)} + label="Log levels" + /> + + + )} + + + + {sources.length > 0 && ( + + + {logSourceOptions.map((option) => ( + + ))} + + + )} + + {logLines.length === 0 && ( + + + + + + + + )} + + + + ); +}; diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/DownloadLogsButton.tsx b/airbyte-webapp/src/area/connection/components/JobLogsModal/DownloadLogsButton.tsx index 1fab73ebb90..ae480b42700 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/DownloadLogsButton.tsx +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/DownloadLogsButton.tsx @@ -4,31 +4,18 @@ import { FormattedMessage, useIntl } from "react-intl"; import { Button } from "components/ui/Button"; import { Tooltip } from "components/ui/Tooltip"; -import { CleanedLogLines } from "area/connection/components/JobHistoryItem/useCleanLogs"; -import { useCurrentWorkspace } from "core/api"; -import { downloadFile, FILE_TYPE_DOWNLOAD, fileizeString } from "core/utils/file"; - interface DownloadButtonProps { - logLines: CleanedLogLines; - fileName: string; + downloadLogs: () => void; } -export const DownloadLogsButton: React.FC = ({ logLines, fileName }) => { +export const DownloadLogsButton: React.FC = ({ downloadLogs }) => { const { formatMessage } = useIntl(); - const { name } = useCurrentWorkspace(); - - const downloadFileWithLogs = () => { - const file = new Blob([logLines.map((logLine) => logLine.text).join("\n")], { - type: FILE_TYPE_DOWNLOAD, - }); - downloadFile(file, fileizeString(`${name}-${fileName}.txt`)); - }; return ( = ({ jobId, initialAttemptId, eventId, connectionId }) => { +export const JobLogsModal: React.FC = ({ jobId, initialAttemptId, eventId, connection }) => { const job = useJobInfoWithoutLogs(jobId); if (job.attempts.length === 0) { @@ -50,47 +41,21 @@ export const JobLogsModal: React.FC = ({ jobId, initialAttemp } return ( - + ); }; -const JobLogsModalInner: React.FC = ({ jobId, initialAttemptId, eventId, connectionId }) => { - const searchInputRef = useRef(null); +const JobLogsModalInner: React.FC = ({ jobId, initialAttemptId, eventId, connection }) => { const job = useJobInfoWithoutLogs(jobId); - const showStructuredLogsUI = useExperiment("logs.structured-logs-ui"); - - const [inputValue, setInputValue] = useState(""); - const [highlightedMatchIndex, setHighlightedMatchIndex] = useState(undefined); - const [matchingLines, setMatchingLines] = useState([]); - const highlightedMatchingLineNumber = highlightedMatchIndex !== undefined ? highlightedMatchIndex + 1 : undefined; const [selectedAttemptId, setSelectedAttemptId] = useState( initialAttemptId ?? job.attempts[job.attempts.length - 1].attempt.id ); - const jobAttempt = useAttemptForJob(jobId, selectedAttemptId); - const aggregatedAttemptStats = useAttemptCombinedStatsForJob(jobId, selectedAttemptId, { - refetchInterval() { - // if the attempt hasn't ended refetch every 2.5 seconds - return jobAttempt.attempt.endedAt ? 
false : 2500; - }, - }); - const { logLines, origins } = useCleanLogs(jobAttempt); - const [selectedLogOrigins, setSelectedLogOrigins] = useState( - KNOWN_LOG_ORIGINS.map(({ key }) => key) - ); - const firstMatchIndex = 0; - const lastMatchIndex = matchingLines.length - 1; - const [debouncedSearchTerm, setDebouncedSearchTerm] = useState(""); - const scrollTo = useMemo( - () => (matchingLines && highlightedMatchIndex !== undefined ? matchingLines[highlightedMatchIndex] : undefined), - [matchingLines, highlightedMatchIndex] - ); + const { data: jobAttempt } = useAttemptForJob(jobId, selectedAttemptId); + + const downloadLogs = useDonwnloadJobLogsFetchQuery(); + const { formatMessage } = useIntl(); const attemptListboxOptions = useMemo(() => { @@ -104,124 +69,8 @@ const JobLogsModalInner: React.FC = ({ jobId, initialAttemptI })); }, [job, formatMessage]); - const onSelectAttempt = (selectedAttemptId: number) => { - setSelectedAttemptId(selectedAttemptId); - setHighlightedMatchIndex(undefined); - setMatchingLines([]); - setInputValue(""); - }; - - const logOriginOptions = useMemo>( - () => - KNOWN_LOG_ORIGINS.map(({ key }) => { - return { label: formatMessage({ id: `jobHistory.logs.logOrigin.${key}` }), value: key }; - }), - [formatMessage] - ); - - const onSelectLogOrigin = useCallback( - (origin: JobLogOrigins) => { - if (!selectedLogOrigins) { - setSelectedLogOrigins(origins.filter((o) => o !== origin)); - } else { - setSelectedLogOrigins( - selectedLogOrigins.includes(origin) - ? selectedLogOrigins.filter((o) => o !== origin) - : [...selectedLogOrigins, origin] - ); - } - }, - [origins, selectedLogOrigins] - ); - - const filteredLogLines = useMemo(() => { - return logLines.filter((line) => selectedLogOrigins?.includes(line.domain ?? JobLogOrigins.Other) ?? true); - }, [logLines, selectedLogOrigins]); - - // Debounces changes to the search input so we don't recompute the matching lines on every keystroke - useDebounce( - () => { - setDebouncedSearchTerm(inputValue); - setHighlightedMatchIndex(undefined); - const searchTermLowerCase = inputValue.toLowerCase(); - if (inputValue.length > 0) { - const matchingLines: number[] = []; - filteredLogLines.forEach((line, index) => { - return line.text.toLocaleLowerCase().includes(searchTermLowerCase) && matchingLines.push(index); - }); - setMatchingLines(matchingLines); - if (matchingLines.length > 0) { - setHighlightedMatchIndex(firstMatchIndex); - } else { - setHighlightedMatchIndex(undefined); - } - } else { - setMatchingLines([]); - setHighlightedMatchIndex(undefined); - } - }, - 150, - [inputValue, filteredLogLines] - ); - - const onSearchTermChange = (searchTerm: string) => { - setInputValue(searchTerm); - }; - - const onSearchInputKeydown = (e: React.KeyboardEvent) => { - if (e.shiftKey && e.key === "Enter") { - e.preventDefault(); - scrollToPreviousMatch(); - } else if (e.key === "Enter") { - e.preventDefault(); - scrollToNextMatch(); - } - }; - - const scrollToPreviousMatch = () => { - if (matchingLines.length === 0) { - return; - } - if (highlightedMatchIndex === undefined) { - setHighlightedMatchIndex(lastMatchIndex); - } else { - setHighlightedMatchIndex(highlightedMatchIndex === firstMatchIndex ? lastMatchIndex : highlightedMatchIndex - 1); - } - searchInputRef.current?.focus(); - }; - - const scrollToNextMatch = () => { - if (matchingLines.length === 0) { - return; - } - if (highlightedMatchIndex === undefined) { - setHighlightedMatchIndex(firstMatchIndex); - } else { - setHighlightedMatchIndex(highlightedMatchIndex === lastMatchIndex ? 
firstMatchIndex : highlightedMatchIndex + 1); - } - searchInputRef.current?.focus(); - }; - - // Focus the search input with cmd + f / ctrl + f - // Clear search input on `esc`, if search input is focused - useEffect(() => { - const handleKeyDown = (e: KeyboardEvent) => { - if (e.key === "f" && (navigator.platform.toLowerCase().includes("mac") ? e.metaKey : e.ctrlKey)) { - e.preventDefault(); - searchInputRef.current?.focus(); - } else if (e.key === "Escape" && document.activeElement === searchInputRef.current) { - if (inputValue.length > 0) { - e.preventDefault(); - setInputValue(""); - } - } - }; - document.body.addEventListener("keydown", handleKeyDown); - return () => document.body.removeEventListener("keydown", handleKeyDown); - }, [inputValue]); - return ( - +
@@ -229,83 +78,37 @@ const JobLogsModalInner: React.FC = ({ jobId, initialAttemptI className={styles.attemptDropdown__listbox} selectedValue={selectedAttemptId} options={attemptListboxOptions} - onSelect={onSelectAttempt} + onSelect={setSelectedAttemptId} isDisabled={job.attempts.length === 1} />
- + {jobAttempt ? ( + + ) : ( + + + + )} - + downloadLogs(connection.name, jobId)} />
- - - - - - - {showStructuredLogsUI && ( - - setSelectedLogOrigins(newOrigins ?? origins)} - label="Log sources" - /> - - )} - - - - {origins.length > 0 && ( - - - {logOriginOptions.map((option) => ( - - ))} - - + {jobAttempt && } + {!jobAttempt && ( +
+ + + + +
)} -
); }; diff --git a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx index 0edcd04e866..8c85e045ac0 100644 --- a/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx +++ b/airbyte-webapp/src/area/connection/components/JobLogsModal/JobLogsModalFailureMessage.tsx @@ -1,14 +1,12 @@ import { FormattedMessage, useIntl } from "react-intl"; import { Box } from "components/ui/Box"; -import { Button } from "components/ui/Button"; +import { Collapsible } from "components/ui/Collapsible"; import { FlexContainer } from "components/ui/Flex"; import { Message } from "components/ui/Message"; import { AttemptFailureSummary, FailureType } from "core/api/types/AirbyteClient"; -import { copyToClipboard } from "core/utils/clipboard"; import { failureUiDetailsFromReason } from "core/utils/errorStatusMessage"; -import { useNotificationService } from "hooks/services/Notification"; import styles from "./JobLogsModalFailureMessage.module.scss"; @@ -17,7 +15,6 @@ interface JobLogsModalFailureMessageProps { } export const JobLogsModalFailureMessage: React.FC = ({ failureSummary }) => { - const { registerNotification } = useNotificationService(); const { formatMessage } = useIntl(); const failureUiDetails = failureUiDetailsFromReason(failureSummary?.failures[0], formatMessage); @@ -30,19 +27,6 @@ export const JobLogsModalFailureMessage: React.FC { - if (!failureUiDetails.secondaryMessage) { - return; - } - await copyToClipboard(failureUiDetails.secondaryMessage); - - registerNotification({ - type: "success", - text: formatMessage({ id: "jobs.failure.copyText.success" }), - id: "jobs.failure.copyText.success", - }); - }; - return (
@@ -54,16 +38,14 @@ export const JobLogsModalFailureMessage: React.FC - - {failureUiDetails.secondaryMessage && ( - - )} } > - {failureUiDetails.secondaryMessage} + {failureUiDetails.secondaryMessage && ( + + {failureUiDetails.secondaryMessage} + + )}
diff --git a/airbyte-webapp/src/components/ScrollableContainer/ScrollableContainer.module.scss b/airbyte-webapp/src/components/ScrollableContainer/ScrollableContainer.module.scss deleted file mode 100644 index a741d40f553..00000000000 --- a/airbyte-webapp/src/components/ScrollableContainer/ScrollableContainer.module.scss +++ /dev/null @@ -1,6 +0,0 @@ -@use "scss/variables"; - -.container { - overflow-y: auto; - padding: variables.$spacing-xl; -} diff --git a/airbyte-webapp/src/components/ScrollableContainer/ScrollableContainer.tsx b/airbyte-webapp/src/components/ScrollableContainer/ScrollableContainer.tsx deleted file mode 100644 index 2260a3e3bad..00000000000 --- a/airbyte-webapp/src/components/ScrollableContainer/ScrollableContainer.tsx +++ /dev/null @@ -1,17 +0,0 @@ -import classNames from "classnames"; -import { forwardRef, PropsWithChildren } from "react"; - -import styles from "./ScrollableContainer.module.scss"; - -/** - * Layout component for ConnectionPage to define the scrollable area and pinpoint the container for Virtuoso, - * in case we need to virtualize something on the page. - */ -export const ScrollableContainer = forwardRef( - ({ children, className, ...restProps }, outerRef) => ( -
- {children} -
- ) -); -ScrollableContainer.displayName = "ScrollableContainer"; diff --git a/airbyte-webapp/src/components/ScrollableContainer/index.ts b/airbyte-webapp/src/components/ScrollableContainer/index.ts deleted file mode 100644 index add9a82db63..00000000000 --- a/airbyte-webapp/src/components/ScrollableContainer/index.ts +++ /dev/null @@ -1 +0,0 @@ -export { ScrollableContainer } from "./ScrollableContainer"; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts b/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts index 7e92e5d780e..cab1baf750b 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/schema.ts @@ -15,24 +15,12 @@ import { StreamMapperType, } from "core/api/types/AirbyteClient"; import { traverseSchemaToField } from "core/domain/catalog"; -import { FeatureItem, useFeature } from "core/services/features"; import { NON_I18N_ERROR_TYPE } from "core/utils/form"; -export const I18N_KEY_UNDER_ONE_HOUR_NOT_ALLOWED = "form.cronExpression.underOneHourNotAllowed"; - -function nextExecutionsMoreFrequentThanOncePerHour(nextExecutions: number[]): boolean { - if (nextExecutions.length > 1) { - const [firstExecution, secondExecution] = nextExecutions; - return secondExecution - firstExecution < 3600; - } - return false; -} - /** * yup schema for the schedule data */ export const useGetScheduleDataSchema = () => { - const allowSubOneHourCronExpressions = useFeature(FeatureItem.AllowSyncSubOneHourCronExpressions); const validateCronExpression = useDescribeCronExpressionFetchQuery(); return useMemo(() => { @@ -75,15 +63,6 @@ export const useGetScheduleDataSchema = () => { type: NON_I18N_ERROR_TYPE, }); } - if ( - !allowSubOneHourCronExpressions && - nextExecutionsMoreFrequentThanOncePerHour(response.nextExecutions) - ) { - return createError({ - path, - message: I18N_KEY_UNDER_ONE_HOUR_NOT_ALLOWED, - }); - } } catch (error) { return createError({ path, @@ -98,7 +77,7 @@ export const useGetScheduleDataSchema = () => { .defined("form.empty.error"), }); }); - }, [validateCronExpression, allowSubOneHourCronExpressions]); + }, [validateCronExpression]); }; /** diff --git a/airbyte-webapp/src/components/connection/ConnectionStatus/useConnectionStatus.ts b/airbyte-webapp/src/components/connection/ConnectionStatus/useConnectionStatus.ts index c59e2ca0f21..b6314d42ce9 100644 --- a/airbyte-webapp/src/components/connection/ConnectionStatus/useConnectionStatus.ts +++ b/airbyte-webapp/src/components/connection/ConnectionStatus/useConnectionStatus.ts @@ -1,7 +1,13 @@ import dayjs from "dayjs"; import { useGetConnectionSyncProgress, useListConnectionsStatuses } from "core/api"; -import { ConnectionSyncStatus, FailureReason } from "core/api/types/AirbyteClient"; +import { + ConnectionStatusesRead, + ConnectionSyncStatus, + FailureOrigin, + FailureReason, + FailureType, +} from "core/api/types/AirbyteClient"; import { moveTimeToFutureByPeriod } from "core/utils/time"; export interface UIConnectionStatus { @@ -21,7 +27,7 @@ export interface UIConnectionStatus { export const useConnectionStatus = (connectionId: string): UIConnectionStatus => { const connectionStatuses = useListConnectionsStatuses([connectionId]); - const connectionStatus = connectionStatuses[0]; + const connectionStatus = selectConnectionStatus(connectionStatuses); const { connectionSyncStatus: status, @@ -57,3 +63,21 @@ export const useConnectionStatus = (connectionId: string): UIConnectionStatus => recordsLoaded: 
syncProgress?.recordsEmitted, }; }; + +const selectConnectionStatus = (connectionStatusReads: ConnectionStatusesRead) => { + const configErrors = connectionStatusReads.filter( + (status) => status.failureReason?.failureType === FailureType.config_error + ); + if (configErrors.length > 0) { + return configErrors[0]; + } + + const sourceErrors = connectionStatusReads.filter( + (status) => status.failureReason?.failureOrigin === FailureOrigin.source + ); + if (sourceErrors.length > 0) { + return sourceErrors[0]; + } + + return connectionStatusReads[0]; +}; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.tsx index b34f554e013..edabd21ce92 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/CreateConnectionForm.tsx @@ -1,13 +1,15 @@ import { useQueryClient } from "@tanstack/react-query"; import React, { Suspense, useCallback, useEffect } from "react"; +import { UseFormReturn } from "react-hook-form"; import { useIntl } from "react-intl"; import { useNavigate } from "react-router-dom"; import { Form } from "components/forms"; import LoadingSchema from "components/LoadingSchema"; +import { ScrollParent } from "components/ui/ScrollParent"; import { useGetDestinationFromSearchParams, useGetSourceFromSearchParams } from "area/connector/utils"; -import { connectionsKeys, useCreateConnection, useDiscoverSchema } from "core/api"; +import { connectionsKeys, HttpError, HttpProblem, useCreateConnection, useDiscoverSchema } from "core/api"; import { ConnectionScheduleType } from "core/api/types/AirbyteClient"; import { ConnectionFormServiceProvider, @@ -20,8 +22,8 @@ import { useNotificationService } from "hooks/services/Notification"; import styles from "./CreateConnectionForm.module.scss"; import { SchemaError } from "./SchemaError"; import { SimplifiedConnectionConfiguration } from "./SimplifiedConnectionCreation/SimplifiedConnectionConfiguration"; +import { I18N_KEY_UNDER_ONE_HOUR_NOT_ALLOWED } from "./SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField"; import { useAnalyticsTrackFunctions } from "./useAnalyticsTrackFunctions"; -import { ScrollableContainer } from "../../ScrollableContainer"; import { FormConnectionFormValues, useConnectionValidationSchema, @@ -78,8 +80,11 @@ const CreateConnectionFormInner: React.FC = () => { queryClient.invalidateQueries(connectionsKeys.statuses([createdConnection.connectionId])); }, 2000); } - } catch (e) { - setSubmitError(e); + } catch (error) { + setSubmitError(error); + // Needs to be re-thrown so react-hook-form can handle the error. We should probably get rid of setSubmitError + // entirely and just use react-hook-form to handle errors. + throw error; } }, [ @@ -96,6 +101,17 @@ const CreateConnectionFormInner: React.FC = () => { ] ); + const onError = useCallback( + (error: Error, _values: FormConnectionFormValues, methods: UseFormReturn) => { + if (error instanceof HttpError && HttpProblem.isType(error, "error:cron-validation/under-one-hour-not-allowed")) { + methods.setError("scheduleData.cron.cronExpression", { + message: I18N_KEY_UNDER_ONE_HOUR_NOT_ALLOWED, + }); + } + }, + [] + ); + return (
}> @@ -103,6 +119,7 @@ const CreateConnectionFormInner: React.FC = () => { defaultValues={initialValues} schema={validationSchema} onSubmit={onSubmit} + onError={onError} trackDirtyChanges formTrackerId={CREATE_CONNECTION_FORM_ID} > @@ -135,9 +152,9 @@ export const CreateConnectionForm: React.FC = () => { if (schemaErrorStatus) { return ( - + - + ); } if (!schema) { diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss index ecdcbb20c26..cc744899574 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.module.scss @@ -2,11 +2,6 @@ @use "scss/variables"; @use "scss/mixins"; -.container { - height: 100%; - padding: variables.$spacing-xl; -} - .nextLink { @include mixins.base-button; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx index 9b65ea3f30e..17d9dcd6404 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionConfiguration.tsx @@ -27,7 +27,6 @@ import { ConnectionRoutePaths, RoutePaths } from "pages/routePaths"; import styles from "./SimplifiedConnectionConfiguration.module.scss"; import { SimplifiedConnectionsSettingsCard } from "./SimplifiedConnectionSettingsCard"; import { SimplifiedSchemaQuestionnaire } from "./SimplifiedSchemaQuestionnaire"; -import { ScrollableContainer } from "../../../ScrollableContainer"; import { SyncCatalogTable } from "../../SyncCatalogTable"; import { CREATE_CONNECTION_FORM_ID } from "../CreateConnectionForm"; @@ -69,7 +68,7 @@ const SimplifiedConnectionCreationReplication: React.FC = () => { }); return ( - + { }); return ( - + - + ); }; diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx index 5f921a2a162..b301746b996 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField.tsx @@ -10,7 +10,6 @@ import { useBasicFrequencyDropdownData, } from "components/connection/ConnectionForm/ScheduleFormField/useBasicFrequencyDropdownData"; import { useTrackConnectionFrequency } from "components/connection/ConnectionForm/ScheduleFormField/useTrackConnectionFrequency"; -import { I18N_KEY_UNDER_ONE_HOUR_NOT_ALLOWED } from "components/connection/ConnectionForm/schema"; import { FormControlFooterError, FormControlFooter, FormControlFooterInfo } from "components/forms/FormControl"; import { ControlLabels } from "components/LabeledControl"; import { 
FlexContainer } from "components/ui/Flex"; @@ -28,6 +27,8 @@ import { useConnectionFormService } from "hooks/services/ConnectionForm/Connecti import { InputContainer } from "./InputContainer"; import styles from "./SimplifiedConnectionScheduleFormField.module.scss"; +export const I18N_KEY_UNDER_ONE_HOUR_NOT_ALLOWED = "form.cronExpression.underOneHourNotAllowed"; + export const SimplifiedConnectionScheduleFormField: React.FC<{ disabled: boolean }> = ({ disabled }) => { const watchedScheduleType = useWatch({ name: "scheduleType" }); diff --git a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.tsx b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.tsx index a773c8537d9..66ab824e096 100644 --- a/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.tsx +++ b/airbyte-webapp/src/components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard.tsx @@ -24,6 +24,7 @@ import { SimplifiedSchemaChangeNotificationFormField } from "./SimplifiedSchemaC interface SimplifiedConnectionsSettingsCardProps { title: string; isCreating: boolean; + hasConfiguredGeography?: boolean; source: SourceRead; destination: DestinationRead; isDeprecated?: boolean; @@ -32,6 +33,7 @@ interface SimplifiedConnectionsSettingsCardProps { export const SimplifiedConnectionsSettingsCard: React.FC = ({ title, isCreating, + hasConfiguredGeography = false, source, destination, isDeprecated = false, @@ -78,7 +80,9 @@ export const SimplifiedConnectionsSettingsCard: React.FC - {canEditDataGeographies && } + {canEditDataGeographies && hasConfiguredGeography && ( + + )} {!isCreating && ( - + Full refresh | Overwrite + Deduped +

+ +
- + Full refresh | Overwrite + Deduped +

+ +
= Omit, "controlButton" | "buttonClassName" | "optionClassName">; + +export const CatalogListBox = ({ ...rest }: CatalogListBoxProps) => { + const { formatMessage } = useIntl(); + + const controlButtonContent = ({ selectedOption, isDisabled }: ListBoxControlButtonProps) => ( + + {selectedOption ? selectedOption.label : formatMessage({ id: "form.selectValue" })} + + ); + + const ControlButton = React.forwardRef>((props, ref) => ( + - } - /> - {modalOpen && ( - { - setModalOpen(false); - if (!newInput) { - return; - } - setValue(path, `${getValues(path) || ""}{{ config['${newInput.key}'] }}`, { - shouldDirty: true, - shouldTouch: true, - shouldValidate: true, - }); - }} - /> - )} - - ); - } -); -InnerUserInputHelper.displayName = "InnerUserInputHelper"; - -const UserInputHelperControlButton: React.FC> = () => { - return ( - } placement="top"> - - - ); -}; diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderInputPlaceholder.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderInputPlaceholder.tsx index edf56b5fa35..309fb627e01 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderInputPlaceholder.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderInputPlaceholder.tsx @@ -18,7 +18,7 @@ export interface BuilderFieldProps { export const BuilderInputPlaceholder = (props: BuilderFieldProps) => { const { setTestingValuesInputOpen } = useConnectorBuilderFormManagementState(); - const { label, tooltip } = getLabelAndTooltip(props.label, props.tooltip, props.manifestPath, "", true, true); + const { label, tooltip } = getLabelAndTooltip(props.label, props.tooltip, props.manifestPath, "", true); return ( diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderOneOf.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderOneOf.tsx index 906f96c5884..e789ba325ef 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderOneOf.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderOneOf.tsx @@ -27,7 +27,6 @@ interface BuilderOneOfProps { tooltip?: string | React.ReactNode; manifestPath?: string; manifestOptionPaths?: string[]; - omitInterpolationContext?: boolean; onSelect?: (type: string) => void; } @@ -38,7 +37,6 @@ export const BuilderOneOf = ({ path, manifestPath, manifestOptionPaths, - omitInterpolationContext, onSelect, }: BuilderOneOfProps) => { const { setValue, unregister } = useFormContext(); @@ -60,7 +58,6 @@ export const BuilderOneOf = ({ manifestPath, path, false, - omitInterpolationContext, manifestOptionPaths ); diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderRequestInjection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderRequestInjection.tsx index 49ad3e88552..488f415b80e 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderRequestInjection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/BuilderRequestInjection.tsx @@ -2,7 +2,6 @@ import { useFormContext } from "react-hook-form"; import { useIntl } from "react-intl"; import { BuilderField } from "./BuilderField"; -import { BuilderFieldWithInputs } from "./BuilderFieldWithInputs"; import { useWatchWithPreview } from "../preview"; import { InjectIntoValue, injectIntoOptions } from "../useBuilderValidationSchema"; @@ -44,8 +43,8 @@ export const BuilderRequestInjection: React.FC = ( tooltip={tooltip || formatMessage({ id: "connectorBuilder.injectInto.tooltip" }, { descriptor })} /> {value !== "path" && ( - 
option.value === value)?.fieldLabel ?? diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/ErrorHandlerSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/ErrorHandlerSection.tsx index 41da8c48cc0..2318e63570b 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/ErrorHandlerSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/ErrorHandlerSection.tsx @@ -74,12 +74,12 @@ export const ErrorHandlerSection: React.FC = (props) = children: ( <> = (props) = children: ( <> = (props) = > <> = (props) = manifestPath="HttpResponseFilter.properties.action" /> { @@ -20,8 +20,8 @@ export const GlobalConfigView: React.FC = () => {
- } diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/IncrementalSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/IncrementalSection.tsx index 4f3250c1397..5df30b3bb30 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/IncrementalSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/IncrementalSection.tsx @@ -13,7 +13,6 @@ import { useConnectorBuilderTestRead } from "services/connectorBuilder/Connector import { BuilderCard } from "./BuilderCard"; import { BuilderField } from "./BuilderField"; -import { BuilderFieldWithInputs } from "./BuilderFieldWithInputs"; import { BuilderInputPlaceholder } from "./BuilderInputPlaceholder"; import { BuilderOneOf } from "./BuilderOneOf"; import { BuilderOptional } from "./BuilderOptional"; @@ -171,8 +170,8 @@ export const IncrementalSection: React.FC = ({ streamFi }, children: ( <> - = ({ streamFi : "connectorBuilder.incremental.custom.value.startDatetime.tooltip.default", })} /> - = ({ streamFi }, children: ( <> - - = ({ streamFi /> )} - = ({ streamFi cursor_granularity: "", }} > - - = ({ streamFi /> )} - const datetimeFields = Object.keys(data?.inferred_datetime_formats || {}); return ( - { const mostRecentRecordValues = data?.slices?.at(0)?.pages.at(0)?.records.at(0); const cursorValue = mostRecentRecordValues?.[fieldValue]; @@ -359,7 +358,7 @@ const CursorField = ({ streamFieldPath }: { streamFieldPath: StreamPathFn }) => ) : undefined; }} - type={datetimeFields.length > 0 ? "combobox" : "string"} + type={datetimeFields.length > 0 ? "combobox" : "jinja"} path={streamFieldPath(CURSOR_PATH)} manifestPath="DatetimeBasedCursor.properties.cursor_field" options={datetimeFields.map((field) => ({ label: field, value: field }))} diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/InputsForm.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/InputsForm.tsx index e4328c34cdf..ad6f31e2fef 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/InputsForm.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/InputsForm.tsx @@ -116,7 +116,8 @@ export const InputForm = ({ .notOneOf( inputInEditing?.isNew ? 
usedKeys : usedKeys.filter((key) => key !== inputInEditing?.key), "connectorBuilder.duplicateFieldID" - ), + ) + .required("form.empty.error"), required: yup.bool(), definition: yup.object().shape({ title: yup.string().required("form.empty.error"), @@ -279,7 +280,9 @@ const InputModal = ({ const { formatMessage } = useIntl(); useEffectOnce(() => { // key input is always touched so errors are shown right away as it will be auto-set by the user changing the title - setValue("key", inputInEditing.key, { shouldValidate: true }); + if (inputInEditing.key) { + setValue("key", inputInEditing.key, { shouldValidate: true }); + } }); return ( diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/JinjaInput.module.scss b/airbyte-webapp/src/components/connectorBuilder/Builder/JinjaInput.module.scss new file mode 100644 index 00000000000..857dcc59f83 --- /dev/null +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/JinjaInput.module.scss @@ -0,0 +1,146 @@ +@use "scss/variables"; +@use "scss/colors"; +@use "scss/fonts"; + +.container { + border: variables.$border-thin solid colors.$grey-200; + border-radius: variables.$border-radius-sm; + + &.error { + border-color: colors.$red-200; + } + + &.disabled { + border-color: colors.$grey-100; + cursor: not-allowed; + + & div { + cursor: not-allowed; + pointer-events: none; + } + } + + &:not(.disabled, .readOnly, .focused):hover { + border-color: colors.$grey-300; + + &.error { + border-color: colors.$red; + } + } + + &.focused { + border-color: colors.$blue; + } + + & :global(.monaco-editor) { + border-radius: inherit; + } + + & :global(.overflow-guard) { + border-radius: inherit; + } +} + +.editor { + & span[title="Close"] { + display: none; + } + + & h2, + h3, + h4, + h5 { + margin: 0; + } + + & :global(.monaco-editor) { + & :global(.ghost-text-decoration) { + color: colors.$grey-400 !important; + } + + & :global(.suggest-widget) { + background-color: colors.$grey-50; + border-color: colors.$grey-200; + + & :global(a.label-name) { + color: colors.$dark-blue; + + & :global(.highlight) { + color: colors.$blue !important; + } + } + + & :global(.details-label) { + color: colors.$dark-blue; + } + + & :global(.monaco-list-row.focused) { + background-color: colors.$blue !important; + + & :global(a.label-name) { + color: colors.$white; + + & :global(.highlight) { + color: colors.$green !important; + } + } + + & :global(.details-label) { + color: colors.$white; + } + } + + & :global(div.monaco-list-row:hover:not(.selected, .focused)) { + background-color: colors.$grey-100 !important; + } + + & :global(.codicon.codicon-symbol-field) { + color: colors.$blue-400; + } + + & :global(.codicon.codicon-symbol-function) { + color: colors.$green-500; + } + + & :global(.codicon.codicon-symbol-variable) { + color: colors.$orange-600; + } + + & :global(.codicon.codicon-account) { + color: colors.$dark-blue-500; + } + } + + & :global(.monaco-hover), + & :global(.suggest-details-container) :global(.suggest-details) { + background-color: colors.$grey-50 !important; + border-color: colors.$grey-200 !important; + + & div { + font-size: variables.$font-size-md; + color: colors.$dark-blue; + } + + & p { + margin: variables.$spacing-md 0; + } + + & pre, + code { + background-color: colors.$grey-100; + border-radius: variables.$border-radius-2xs; + padding: variables.$spacing-xs variables.$spacing-sm; + white-space: pre-wrap; + word-break: break-word; + font-size: variables.$font-size-sm; + } + } + } +} + +:export { + fontFamily: fonts.$primary; + fontSize: 
variables.$font-size-lg; + textForeground: colors.$dark-blue; + jinjaForeground: colors.$blue-500; +} diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/JinjaInput.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/JinjaInput.tsx new file mode 100644 index 00000000000..d4f87295fa2 --- /dev/null +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/JinjaInput.tsx @@ -0,0 +1,482 @@ +import { Monaco } from "@monaco-editor/react"; +import classNames from "classnames"; +import isString from "lodash/isString"; +import { IRange, languages, Range, editor, Position } from "monaco-editor"; +import React, { useCallback, useState } from "react"; +import { useFormContext } from "react-hook-form"; +import { useIntl } from "react-intl"; + +import { CodeEditor } from "components/ui/CodeEditor"; + +import { useConnectorBuilderFormManagementState } from "services/connectorBuilder/ConnectorBuilderStateService"; + +import { JINJA_TOKEN, NON_JINJA_TOKEN, conf, language } from "./jinja"; +import styles from "./JinjaInput.module.scss"; +import { getInterpolationValues, getInterpolationVariablesByManifest, InterpolationValue } from "./manifestHelpers"; +import { BuilderFormInput } from "../types"; +import { formatJson } from "../utils"; + +const ADD_NEW_USER_INPUT_COMMAND = "addNewUserInput"; +const HIDDEN_INTERPOLATION_VALUES = ["parameters", "stream_slice"]; +const interpolationValues = getInterpolationValues(); + +let isSuggestionDetailShown = false; + +interface JinjaInputProps { + name: string; + value: string; + onChange: (value: string | undefined) => void; + onBlur: (value: string) => void; + disabled?: boolean; + readOnly?: boolean; + error?: boolean; + manifestPath?: string; +} + +type SuggestableValue = + | { + suggestionType: "userInput"; + input: BuilderFormInput; + } + | { + suggestionType: "interpolation"; + value: InterpolationValue; + } + | { + suggestionType: "newUserInput"; + }; + +export const JinjaInput: React.FC = ({ + name, + value, + onChange, + onBlur, + disabled, + readOnly, + error, + manifestPath, +}) => { + const { formatMessage } = useIntl(); + const { getValues } = useFormContext(); + const { setNewUserInputContext } = useConnectorBuilderFormManagementState(); + const [focused, setFocused] = useState(false); + + const formatExamples = useCallback( + (examples: string[] | object[]): string => { + if (!examples || examples.length === 0) { + return ""; + } + const title = formatMessage({ id: "jinjaInput.suggest.examples" }, { count: examples.length }); + const content = examples + .map((example) => { + return `
${isString(example) ? example : formatJson(example)}
`; + }) + .join("\n"); + return `#### ${title}\n${content}`; + }, + [formatMessage] + ); + + const buildDocumentation = useCallback( + (value: InterpolationValue, label: string) => { + return `### ${label}\n${value.description}\n${formatExamples(value.examples)}`; + }, + [formatExamples] + ); + + const convertInterpolationValueToCompletionItem = useCallback( + (value: InterpolationValue, range: IRange, index: number): languages.CompletionItem => { + const [descriptionFirstSentence] = value.description.split("."); + + const { label, hasArguments } = buildLabel(value); + + return { + insertText: + value.type === "variable" + ? `${value.title}['\${0}']` + : value.type === "macro" + ? hasArguments + ? `${value.title}(\${0})` + : `${value.title}()` + : hasArguments + ? `| ${value.title}(\${0})` + : `| ${value.title}`, + insertTextRules: languages.CompletionItemInsertTextRule.InsertAsSnippet, + kind: + value.type === "variable" + ? languages.CompletionItemKind.Field + : value.type === "macro" + ? languages.CompletionItemKind.Function + : languages.CompletionItemKind.Variable, + label: { + label, + description: descriptionFirstSentence, + }, + documentation: { + value: buildDocumentation(value, label), + isTrusted: true, + supportHtml: true, + }, + // enforce custom sorting based on index, which only works with letters + sortText: numberToLetter(index), + range, + }; + }, + [buildDocumentation] + ); + + const convertUserInputToCompletionItem = useCallback( + (userInput: BuilderFormInput, range: IRange, index: number): languages.CompletionItem => { + const label = userInput.definition.title || userInput.key; + const testingValue = getValues("testingValues")?.[userInput.key]; + const formattedTestingValue = isString(testingValue) ? testingValue : formatJson(testingValue); + + return { + insertText: `config['${userInput.key}']`, + insertTextRules: languages.CompletionItemInsertTextRule.InsertAsSnippet, + kind: languages.CompletionItemKind.User, + label: { + label, + description: formattedTestingValue, + }, + documentation: { + value: `### ${label}\n${formatMessage( + { id: "jinjaInput.suggest.userInput.description" }, + { label } + )}\n#### ${formatMessage({ + id: "jinjaInput.suggest.userInput.currentTestingValue", + })}\n
${formattedTestingValue}
`, + isTrusted: true, + supportHtml: true, + }, + // enforce custom sorting based on index, which only works with letters + sortText: numberToLetter(index), + range, + }; + }, + [formatMessage, getValues] + ); + + const getSuggestions = useCallback( + ( + model: editor.ITextModel, + position: Position, + monaco: Monaco, + localManifestPath: string | undefined + ): languages.CompletionItem[] => { + // Get the non-whitespace text before and after the cursor + const word = model.getWordAtPosition(position); + const range: IRange = { + startLineNumber: position.lineNumber, + endLineNumber: position.lineNumber, + startColumn: word?.startColumn ?? position.column, + endColumn: word?.endColumn ?? position.column, + }; + + const cursorToken = getTokenAtPosition(monaco, model, position); + if (cursorToken?.type !== JINJA_TOKEN) { + return []; + } + + const supportedVariables = localManifestPath ? getInterpolationVariablesByManifest(localManifestPath) : undefined; + const validInterpolationValues = interpolationValues + .filter((value) => !HIDDEN_INTERPOLATION_VALUES.includes(value.title)) + .filter( + (value) => value.type !== "variable" || !supportedVariables || supportedVariables.includes(value.title) + ); + + const userInputs: BuilderFormInput[] = getValues("formValues.inputs"); + const suggestableNewUserInput: SuggestableValue[] = [{ suggestionType: "newUserInput" as const }]; + const suggestableValues: SuggestableValue[] = suggestableNewUserInput + .concat( + userInputs.map((input) => ({ + input, + suggestionType: "userInput" as const, + })) + ) + .concat( + validInterpolationValues.map((value) => { + return { value, suggestionType: "interpolation" as const }; + }) + ); + const suggestionsWithTitles: Array<{ suggestion: languages.CompletionItem; titleFromManifestSchema: string }> = + suggestableValues.map((value, index) => { + if (value.suggestionType === "userInput") { + return { + suggestion: convertUserInputToCompletionItem(value.input, range, index), + titleFromManifestSchema: value.input.definition.title || value.input.key, + }; + } else if (value.suggestionType === "interpolation") { + return { + suggestion: convertInterpolationValueToCompletionItem(value.value, range, index), + titleFromManifestSchema: value.value.title, + }; + } + return { + suggestion: { + insertText: "", + kind: languages.CompletionItemKind.User, + label: formatMessage({ id: "jinjaInput.suggest.userInput.createNew.label" }), + documentation: { + value: formatMessage({ id: "jinjaInput.suggest.userInput.createNew.doc" }), + }, + range, + // enforce custom sorting based on index, which only works with letters + sortText: numberToLetter(index), + command: { + id: ADD_NEW_USER_INPUT_COMMAND, + title: "", + arguments: [model, position], + }, + }, + titleFromManifestSchema: "", + }; + }); + + return ( + word + ? suggestionsWithTitles.filter(({ titleFromManifestSchema, suggestion }) => { + const label = isString(suggestion.label) ? 
suggestion.label : suggestion.label.label; + return ( + (label.startsWith(word.word) || titleFromManifestSchema.startsWith(word.word)) && + label !== word.word && + titleFromManifestSchema !== word.word + ); + }) + : suggestionsWithTitles + ).map(({ suggestion }) => suggestion); + }, + [convertInterpolationValueToCompletionItem, convertUserInputToCompletionItem, formatMessage, getValues] + ); + + return ( + { + if (monaco.languages.getLanguages().find((lang) => lang.id === "jinja")) { + return; + } + monaco.languages.register({ id: "jinja" }); + monaco.languages.registerCompletionItemProvider("jinja", { + provideCompletionItems: (model, position) => { + // See ! HACK ! comment below for explanation + // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unnecessary-type-assertion + return { suggestions: getSuggestions(model, position, monaco, (model as any).manifestPath) }; + }, + }); + monaco.languages.registerHoverProvider("jinja", { + provideHover: (model, position) => { + const token = getTokenAtPosition(monaco, model, position); + if (token?.type !== JINJA_TOKEN) { + return { contents: [] }; + } + const word = model.getWordAtPosition(position); + const value = interpolationValues.find( + (value) => value.title === word?.word || buildLabel(value).label === word?.word + ); + if (!value) { + return { contents: [] }; + } + return { + contents: [ + { value: buildDocumentation(value, buildLabel(value).label), isTrusted: true, supportHtml: true }, + ], + }; + }, + }); + monaco.languages.setLanguageConfiguration("jinja", conf); + monaco.languages.setMonarchTokensProvider("jinja", language); + + monaco.editor.registerCommand( + ADD_NEW_USER_INPUT_COMMAND, + (_accessor, model: editor.ITextModel, position: Position) => { + setNewUserInputContext({ model, position }); + } + ); + }} + onMount={(editor, monaco) => { + const model = editor.getModel(); + // ! HACK ! - this attaches the manifestPath to the model, so that the provideCompletionItems call + // above pulls the value from the model. + // This is needed because the provideCompletionItems function is set on the language, not individual + // editor instances, so it can't change from one instance to the next. Therefore the only way to have + // it produce different results is to shove the unique value into the model itself. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (model as any).manifestPath = manifestPath; + + // Prevent newlines + editor.onDidChangeModelContent(() => { + const value = editor.getValue(); + if (value.includes("\n")) { + editor.setValue(value.replace(/\n/g, "")); // Prevent newlines + } + }); + + editor.onDidChangeCursorSelection((e) => { + const model = editor.getModel(); + if (!model) { + return; + } + const position = e.selection.getPosition(); + + // Check suggestions length, because we don't want to trigger the suggest widget + // when there are none, as then it would show "No suggestions" + const suggestions = getSuggestions(model, position, monaco, manifestPath); + if (suggestions.length > 0) { + editor.trigger("cursorChange", "editor.action.triggerSuggest", {}); + // In order to have suggestion details shown by default, we need to manually trigger the + // "toggleSuggestionDetails" command. But, this command can only be triggered if the suggest + // widget is open, so we need to wait for the next tick to trigger it. 
+ // Since we only have a "toggle" command and not a "show" command, we keep track of the state + // of this setting in a javascript variable, and only trigger it if it's not already shown, + // so that it is only triggered one time and left on. + setTimeout(() => { + if (!isSuggestionDetailShown) { + editor.trigger("cursorChange", "toggleSuggestionDetails", {}); + isSuggestionDetailShown = true; + } + }, 0); + } else { + editor.trigger("editor", "hideSuggestWidget", {}); + } + }); + + editor.onDidFocusEditorWidget(() => { + setFocused(true); + }); + + editor.onDidBlurEditorWidget(() => { + setFocused(false); + }); + + editor.addAction({ + id: "insertBracketsLeft", + label: "Insert Brackets (Left)", + keybindings: [monaco.KeyMod.Shift | monaco.KeyCode.BracketLeft], + run: async () => { + insertBrackets("{", editor, monaco); + }, + }); + + editor.addAction({ + id: "insertBracketsRight", + label: "Insert Brackets (Right)", + keybindings: [monaco.KeyMod.Shift | monaco.KeyCode.BracketRight], + run: async () => { + insertBrackets("}", editor, monaco); + }, + }); + }} + height="35px" + options={{ + cursorStyle: "line-thin", + lineNumbers: "off", + suggest: { + showWords: false, + snippetsPreventQuickSuggestions: true, + preview: true, + }, + renderLineHighlight: "none", + scrollbar: { + vertical: "hidden", + horizontal: "hidden", + }, + overviewRulerLanes: 0, + folding: false, + lineDecorationsWidth: 8, + padding: { + top: 5, + }, + fontFamily: styles.fontFamily, + fontSize: 14, + scrollBeyondLastLine: false, + fixedOverflowWidgets: true, + }} + language="jinja" + bubbleUpUndoRedo + tabFocusMode + /> + ); +}; + +const numberToLetter = (number: number): string => { + return String.fromCharCode("a".charCodeAt(0) + number); +}; + +const buildLabel = (value: InterpolationValue): { label: string; hasArguments: boolean } => { + const hasArguments = (value.type === "macro" || value.type === "filter") && Object.keys(value.arguments).length > 0; + const label = value.type === "macro" || hasArguments ? 
`${value.title}()` : value.title; + if (value.type === "filter") { + return { label: `| ${label}`, hasArguments }; + } + return { label, hasArguments }; +}; + +const getTokenAtPosition = (monaco: Monaco, model: editor.ITextModel, position: Position) => { + const currentLine = model.getLineContent(position.lineNumber); + const tokens = monaco.editor.tokenize(currentLine, "jinja")[0]; + const currentColumn = position.column - 1; // Adjust column to zero-based index + let currentTokenIndex = 0; + while (currentTokenIndex < tokens.length - 1 && tokens[currentTokenIndex + 1].offset < currentColumn) { + currentTokenIndex += 1; + } + + const token = tokens[currentTokenIndex]; + if (!token) { + return null; + } + return token; +}; + +const insertBrackets = (pressedKey: "}" | "{", editor: editor.IStandaloneCodeEditor, monaco: Monaco) => { + const position = editor.getPosition(); + if (!position) { + return; + } + + const model = editor.getModel(); + if (!model) { + return; + } + + // if inside a jinja expression, just insert the character as normal + const cursorToken = getTokenAtPosition(monaco, model, position); + if (cursorToken && cursorToken.type !== NON_JINJA_TOKEN) { + editor.executeEdits(null, [ + { + range: new Range(position.lineNumber, position.column, position.lineNumber, position.column), + text: pressedKey, + forceMoveMarkers: true, + }, + ]); + return; + } + + // Insert `{{ }}` + editor.executeEdits(null, [ + { + range: new Range(position.lineNumber, position.column, position.lineNumber, position.column), + text: "{{ }}", + forceMoveMarkers: true, + }, + ]); + + // Move the cursor to between the braces + editor.setPosition({ + lineNumber: position.lineNumber, + column: position.column + 3, + }); + + // Show the suggestions widget + editor.trigger("cursorChange", "editor.action.triggerSuggest", {}); +}; diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/KeyValueListField.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/KeyValueListField.tsx index 865a02a5a2f..0301131090a 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/KeyValueListField.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/KeyValueListField.tsx @@ -8,7 +8,7 @@ import { Button } from "components/ui/Button"; import { FlexContainer, FlexItem } from "components/ui/Flex"; import { RemoveButton } from "components/ui/RemoveButton/RemoveButton"; -import { BuilderFieldWithInputs } from "./BuilderFieldWithInputs"; +import { BuilderField } from "./BuilderField"; import { getLabelAndTooltip } from "./manifestHelpers"; import { useBuilderWatchArrayWithPreview } from "../preview"; import { BuilderState, concatPath } from "../types"; @@ -23,16 +23,12 @@ const KeyValueInput: React.FC = ({ onRemove, path }) => { return ( - + - @@ -47,7 +43,6 @@ interface KeyValueListFieldProps { tooltip?: ReactNode; manifestPath?: string; optional?: boolean; - omitInterpolationContext?: boolean; } export const KeyValueListField: React.FC = ({ @@ -56,16 +51,8 @@ export const KeyValueListField: React.FC = ({ tooltip, manifestPath, optional, - omitInterpolationContext = false, }) => { - const { label: finalLabel, tooltip: finalTooltip } = getLabelAndTooltip( - label, - tooltip, - manifestPath, - path, - false, - omitInterpolationContext - ); + const { label: finalLabel, tooltip: finalTooltip } = getLabelAndTooltip(label, tooltip, manifestPath, path, false); const { fieldValue: fields, append, remove } = useBuilderWatchArrayWithPreview(path); return ( diff --git 
a/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx index 18666626b4d..0b3c49c9448 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/PaginationSection.tsx @@ -10,7 +10,6 @@ import { links } from "core/utils/links"; import { BuilderCard } from "./BuilderCard"; import { BuilderField } from "./BuilderField"; -import { BuilderFieldWithInputs } from "./BuilderFieldWithInputs"; import { BuilderOneOf } from "./BuilderOneOf"; import { BuilderRequestInjection } from "./BuilderRequestInjection"; import { ToggleGroupField } from "./ToggleGroupField"; @@ -238,13 +237,13 @@ export const PaginationSection: React.FC = ({ streamFiel }, children: ( <> - - - = ({ stre label={formatMessage({ id: "connectorBuilder.parentStreams.label" })} tooltip={formatMessage({ id: "connectorBuilder.parentStreams.parentStream.tooltip" })} /> - - = ({ optional /> = (props) => { const { formatMessage } = useIntl(); @@ -36,7 +35,6 @@ export const RequestOptionSection: React.FC = (props) key="json_list" manifestPath="HttpRequester.properties.request_body_json" optional - omitInterpolationContext={props.omitInterpolationContext} /> ), }, @@ -52,7 +50,6 @@ export const RequestOptionSection: React.FC = (props) key="form_list" manifestPath="HttpRequester.properties.request_body_data" optional - omitInterpolationContext={props.omitInterpolationContext} /> ), }, @@ -67,7 +64,6 @@ export const RequestOptionSection: React.FC = (props) type="jsoneditor" path={concatPath(props.basePath, "requestBody.value")} manifestPath="HttpRequester.properties.request_body_json" - omitInterpolationContext={props.omitInterpolationContext} /> ), }, @@ -83,7 +79,6 @@ export const RequestOptionSection: React.FC = (props) path={concatPath(props.basePath, "requestBody.value")} label={formatMessage({ id: "connectorBuilder.requestOptions.stringFreeform.value" })} manifestPath="HttpRequester.properties.request_body_data" - omitInterpolationContext={props.omitInterpolationContext} /> ), }, @@ -95,19 +90,16 @@ export const RequestOptionSection: React.FC = (props) path={concatPath(props.basePath, "requestParameters")} manifestPath="HttpRequester.properties.request_parameters" optional - omitInterpolationContext={props.omitInterpolationContext} /> path={concatPath(props.basePath, "requestBody")} label={formatMessage({ id: "connectorBuilder.requestOptions.requestBody" })} options={getBodyOptions()} - omitInterpolationContext={props.omitInterpolationContext} /> ); diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/StreamConfigView.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/StreamConfigView.tsx index 3ae5997428a..8694f892935 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/StreamConfigView.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/StreamConfigView.tsx @@ -12,6 +12,7 @@ import { Pre } from "components/ui/Pre"; import { Text } from "components/ui/Text"; import { + GzipJsonDecoderType, IterableDecoderType, JsonDecoderType, JsonlDecoderType, @@ -29,7 +30,6 @@ import { AddStreamButton } from "./AddStreamButton"; import { BuilderCard } from "./BuilderCard"; import { BuilderConfigView } from "./BuilderConfigView"; import { BuilderField } from "./BuilderField"; -import { BuilderFieldWithInputs } from "./BuilderFieldWithInputs"; import { BuilderTitle } from "./BuilderTitle"; import { 
ErrorHandlerSection } from "./ErrorHandlerSection"; import { IncrementalSection } from "./IncrementalSection"; @@ -84,8 +84,8 @@ export const StreamConfigView: React.FC = React.memo(({ s {selectedTab === "configuration" ? (
- `${baseUrl}${value}` : undefined} @@ -109,6 +109,7 @@ export const StreamConfigView: React.FC = React.memo(({ s XmlDecoderType.XmlDecoder, JsonlDecoderType.JsonlDecoder, IterableDecoderType.IterableDecoder, + GzipJsonDecoderType.GzipJsonDecoder, ]} /> key={schemaFieldPath} value={schema || ""} language="json" - automaticLayout onChange={(val: string | undefined) => { setValue(path, val, { shouldValidate: true, diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/TransformationSection.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/TransformationSection.tsx index 9842c0949de..1263fd770f8 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/TransformationSection.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/TransformationSection.tsx @@ -5,7 +5,6 @@ import { links } from "core/utils/links"; import { BuilderCard } from "./BuilderCard"; import { BuilderField } from "./BuilderField"; -import { BuilderFieldWithInputs } from "./BuilderFieldWithInputs"; import { BuilderList } from "./BuilderList"; import { BuilderOneOf, OneOfOption } from "./BuilderOneOf"; import { getDescriptionByManifest, getLabelByManifest } from "./manifestHelpers"; @@ -49,11 +48,7 @@ export const TransformationSection: React.FC = ({ children: ( <> - + ), }, diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/jinja.ts b/airbyte-webapp/src/components/connectorBuilder/Builder/jinja.ts new file mode 100644 index 00000000000..ad0ac09bfac --- /dev/null +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/jinja.ts @@ -0,0 +1,91 @@ +/* --------------------------------------------------------------------------------------------- + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt in the project root for license information. 
+ *--------------------------------------------------------------------------------------------*/ + +// based on https://github.com/microsoft/monaco-languages/blob/master/src/twig/twig.ts + +import { languages } from "monaco-editor"; + +import IRichLanguageConfiguration = languages.LanguageConfiguration; +import ILanguage = languages.IMonarchLanguage; + +export const conf: IRichLanguageConfiguration = { + wordPattern: /((\|\s*)?\w+(\(\))?)|(\| ?)/g, + + comments: { + blockComment: ["{#", "#}"], + }, + + brackets: [ + ["{#", "#}"], + ["{%", "%}"], + ["{{", "}}"], + ["(", ")"], + ["[", "]"], + + // HTML + [""], + ["<", ">"], + ], + + autoClosingPairs: [ + { open: "{# ", close: " #}" }, + { open: "{% ", close: " %}" }, + { open: "{{ ", close: " }}" }, + { open: "[", close: "]" }, + { open: "(", close: ")" }, + { open: '"', close: '"' }, + { open: "'", close: "'" }, + ], + + surroundingPairs: [ + { open: '"', close: '"' }, + { open: "'", close: "'" }, + + // HTML + { open: "<", close: ">" }, + ], +}; + +export const JINJA_TOKEN = "jinja"; +export const NON_JINJA_TOKEN = "non-jinja"; +export const JINJA_STRING_TOKEN = "jinja.string"; +export const JINJA_OTHER_TOKEN = "jinja.other"; +export const JINJA_FIRST_BRACKET_TOKEN = "jinja.bracket.first"; +export const JINJA_CLOSING_BRACKET_TOKEN = "jinja.bracket.closing"; + +export const language: ILanguage = { + tokenPostfix: "", + ignoreCase: true, + + tokenizer: { + root: [ + // Match the first `{` of `{{` + [/\{/, { token: JINJA_FIRST_BRACKET_TOKEN, next: "@expectSecondBracket" }], + [/[^{}]+|\{[^{}]*\}/, NON_JINJA_TOKEN], // Match text outside `{{ }}` + ], + expectSecondBracket: [ + // Match the second `{` and transition into `insideBrackets` + [/\{/, { token: JINJA_TOKEN, next: "@insideBrackets" }], + // Fallback for invalid syntax (single `{` without a second `{`) + [/./, { token: NON_JINJA_TOKEN, next: "@pop" }], + ], + insideBrackets: [ + // Exit `{{ }}` + [/\}\}/, { token: JINJA_CLOSING_BRACKET_TOKEN, next: "@pop" }], + // Enter `""` inside `{{ }}` + [/["']/, { token: JINJA_STRING_TOKEN, next: "@insideString" }], + // Match alphanumeric and whitespace + [/[\w\s(|]/, JINJA_TOKEN], + // Match other characters + [/[^\w\s(|]/, JINJA_OTHER_TOKEN], + ], + insideString: [ + // Exit `""` and return to `@insideBrackets` + [/["']/, { token: JINJA_STRING_TOKEN, next: "@pop" }], + // Match string content inside `""` + [/[^"']+/, JINJA_STRING_TOKEN], + ], + }, +}; diff --git a/airbyte-webapp/src/components/connectorBuilder/Builder/manifestHelpers.tsx b/airbyte-webapp/src/components/connectorBuilder/Builder/manifestHelpers.tsx index eb91f44dd0e..8d4a140907d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/Builder/manifestHelpers.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/Builder/manifestHelpers.tsx @@ -1,12 +1,9 @@ import get from "lodash/get"; import { ReactNode } from "react"; -import { FormattedMessage } from "react-intl"; import ReactMarkdown from "react-markdown"; import { LabelInfo } from "components/Label"; -import { links } from "core/utils/links"; - import declarativeComponentSchema from "../../../../build/declarative_component_schema.yaml"; export interface ManifestDescriptor { @@ -37,48 +34,23 @@ export function getDescriptionByManifest(manifestPath: string) { return getDescriptor(manifestPath)?.description; } +export function getInterpolationVariablesByManifest(manifestPath: string) { + return getDescriptor(manifestPath)?.interpolation_context ?? 
undefined;
+}
+
 export function getLabelAndTooltip(
   label: string | undefined,
   tooltip: React.ReactNode | undefined,
   manifestPath: string | undefined,
   path: string,
   omitExamples = false,
-  omitInterpolationContext = false,
   manifestOptionPaths?: string[]
 ): { label: string; tooltip: React.ReactNode | undefined } {
   const manifestDescriptor = manifestPath ? getDescriptor(manifestPath) : undefined;
   const finalLabel = label || manifestDescriptor?.title || path;
-  let finalDescription: ReactNode = manifestDescriptor?.description ? (
+  const finalDescription: ReactNode = manifestDescriptor?.description ? (
     {manifestDescriptor?.description}
   ) : undefined;
-  if (!omitInterpolationContext && manifestDescriptor?.interpolation_context) {
-    finalDescription = (
-      <>
-        {finalDescription}
-
- :{" "} -
    - {manifestDescriptor.interpolation_context.map((context, i) => ( -
  • - - {context} - -
  • - ))} -
- ( - - {node} - - ), - }} - /> - - ); - } const options = manifestOptionPaths?.flatMap((optionPath) => { const optionDescriptor: ManifestDescriptor | undefined = get( declarativeComponentSchema, @@ -107,3 +79,36 @@ export function getLabelAndTooltip( ) : null, }; } + +export interface InterpolationVariable { + title: string; + description: string; + examples: string[] | object[]; +} + +type InterpolationFunction = InterpolationVariable & { + arguments: Record; + return_type: string; +}; + +export type InterpolationValue = + | (InterpolationVariable & { + type: "variable"; + }) + | (InterpolationFunction & { + type: "macro" | "filter"; + }); + +export interface InterpolationValues { + variables: InterpolationVariable[]; + macros: InterpolationFunction[]; + filters: InterpolationFunction[]; +} +export const getInterpolationValues = (): InterpolationValue[] => { + const { variables, macros, filters } = get(declarativeComponentSchema, `interpolation`) as InterpolationValues; + return [ + ...variables.map((variable) => ({ ...variable, type: "variable" as const })), + ...macros.map((macro) => ({ ...macro, type: "macro" as const })), + ...filters.map((filter) => ({ ...filter, type: "filter" as const })), + ]; +}; diff --git a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/AdvancedTestSettings.tsx b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/AdvancedTestSettings.tsx index d4790ffb3ff..14713cb1c87 100644 --- a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/AdvancedTestSettings.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/AdvancedTestSettings.tsx @@ -161,7 +161,6 @@ const AdvancedTestSettingsModal: React.FC< { setValue("testState", val ?? "", { shouldValidate: true, diff --git a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamTestingPanel.tsx b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamTestingPanel.tsx index 06039503a2b..477625808b6 100644 --- a/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamTestingPanel.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/StreamTestingPanel/StreamTestingPanel.tsx @@ -63,7 +63,7 @@ export const StreamTestingPanel: React.FC = () => { ); } - const hasStreams = jsonManifest.streams?.length > 0; + const hasStreams = jsonManifest.streams && jsonManifest.streams.length > 0; return (
diff --git a/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.tsx b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.tsx index 11b1b65356d..3ea4880bda0 100644 --- a/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/YamlEditor/YamlEditor.tsx @@ -78,7 +78,6 @@ export const YamlEditor: React.FC = ({ { yamlEditorRef.current = editor; diff --git a/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts b/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts index fa1ed286f6d..534630f1ff8 100644 --- a/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts +++ b/airbyte-webapp/src/components/connectorBuilder/convertManifestToBuilderForm.ts @@ -43,6 +43,7 @@ import { XmlDecoderType, IterableDecoderType, SimpleRetrieverDecoder, + GzipJsonDecoderType, } from "core/api/types/ConnectorManifest"; import { @@ -384,16 +385,16 @@ function requesterToRequestBody(requester: HttpRequester): BuilderRequestBody { } const manifestDecoderToBuilder = (decoder: SimpleRetrieverDecoder | undefined, streamName: string): BuilderDecoder => { + const supportedDecoderTypes: Array = [ + undefined, + JsonDecoderType.JsonDecoder, + JsonlDecoderType.JsonlDecoder, + XmlDecoderType.XmlDecoder, + IterableDecoderType.IterableDecoder, + GzipJsonDecoderType.GzipJsonDecoder, + ]; const decoderType = decoder?.type; - if ( - ![ - undefined, - JsonDecoderType.JsonDecoder, - JsonlDecoderType.JsonlDecoder, - XmlDecoderType.XmlDecoder, - IterableDecoderType.IterableDecoder, - ].includes(decoderType) - ) { + if (!supportedDecoderTypes.includes(decoderType)) { throw new ManifestCompatibilityError(streamName, "decoder is not supported"); } @@ -406,6 +407,8 @@ const manifestDecoderToBuilder = (decoder: SimpleRetrieverDecoder | undefined, s return "JSON Lines"; case IterableDecoderType.IterableDecoder: return "Iterable"; + case GzipJsonDecoderType.GzipJsonDecoder: + return "gzip JSON"; default: return "JSON"; } diff --git a/airbyte-webapp/src/components/connectorBuilder/types.ts b/airbyte-webapp/src/components/connectorBuilder/types.ts index c57336b4120..ffde91d1ead 100644 --- a/airbyte-webapp/src/components/connectorBuilder/types.ts +++ b/airbyte-webapp/src/components/connectorBuilder/types.ts @@ -88,7 +88,7 @@ export interface BuilderFormInput { type BuilderHttpMethod = "GET" | "POST"; -export const BUILDER_DECODER_TYPES = ["JSON", "XML", "JSON Lines", "Iterable"] as const; +export const BUILDER_DECODER_TYPES = ["JSON", "XML", "JSON Lines", "Iterable", "gzip JSON"] as const; export type BuilderDecoder = (typeof BUILDER_DECODER_TYPES)[number]; interface BuilderRequestOptions { @@ -896,6 +896,8 @@ const builderDecoderToManifest = (decoder: BuilderDecoder): SimpleRetrieverDecod return { type: "JsonlDecoder" }; case "Iterable": return { type: "IterableDecoder" }; + case "gzip JSON": + return { type: "GzipJsonDecoder" }; } }; diff --git a/airbyte-webapp/src/components/connectorBuilder/useStreamTestMetadata.ts b/airbyte-webapp/src/components/connectorBuilder/useStreamTestMetadata.ts index aee02d887af..0d3e0c7128d 100644 --- a/airbyte-webapp/src/components/connectorBuilder/useStreamTestMetadata.ts +++ b/airbyte-webapp/src/components/connectorBuilder/useStreamTestMetadata.ts @@ -74,7 +74,7 @@ export const useStreamTestMetadata = () => { const getStreamTestMetadataStatus = useCallback( (streamName: string): StreamTestMetadataStatus | undefined | null => { - const 
resolvedStream = resolvedManifest?.streams.find((stream) => stream.name === streamName); + const resolvedStream = resolvedManifest?.streams?.find((stream) => stream.name === streamName); if (!resolvedStream) { // undefined indicates that the stream has not yet been resolved, so warnings should not be shown return undefined; diff --git a/airbyte-webapp/src/components/destination/DestinationConnectionTable/DestinationConnectionTable.module.scss b/airbyte-webapp/src/components/destination/DestinationConnectionTable/DestinationConnectionTable.module.scss new file mode 100644 index 00000000000..74e6df5b983 --- /dev/null +++ b/airbyte-webapp/src/components/destination/DestinationConnectionTable/DestinationConnectionTable.module.scss @@ -0,0 +1,3 @@ +.container { + padding-top: 0; +} diff --git a/airbyte-webapp/src/components/destination/DestinationConnectionTable/DestinationConnectionTable.tsx b/airbyte-webapp/src/components/destination/DestinationConnectionTable/DestinationConnectionTable.tsx index c493d063b63..ecdb2782ae9 100644 --- a/airbyte-webapp/src/components/destination/DestinationConnectionTable/DestinationConnectionTable.tsx +++ b/airbyte-webapp/src/components/destination/DestinationConnectionTable/DestinationConnectionTable.tsx @@ -2,23 +2,22 @@ import React from "react"; import { ConnectionTable } from "components/EntityTable"; import { getConnectionTableData } from "components/EntityTable/utils"; -import { Box } from "components/ui/Box"; import { ScrollParent } from "components/ui/ScrollParent"; import { WebBackendConnectionListItem } from "core/api/types/AirbyteClient"; -interface IProps { +import styles from "./DestinationConnectionTable.module.scss"; + +interface DestinationConnectionTableProps { connections: WebBackendConnectionListItem[]; } -export const DestinationConnectionTable: React.FC = ({ connections }) => { +export const DestinationConnectionTable: React.FC = ({ connections }) => { const data = getConnectionTableData(connections, "destination"); return ( - - - - + + ); }; diff --git a/airbyte-webapp/src/components/forms/Form.stories.tsx b/airbyte-webapp/src/components/forms/Form.stories.tsx index facdfdeab26..01aedfa060c 100644 --- a/airbyte-webapp/src/components/forms/Form.stories.tsx +++ b/airbyte-webapp/src/components/forms/Form.stories.tsx @@ -49,13 +49,16 @@ const listBoxOptions: Array> = ["one", "two", "three"].map((v) => const MyFormControl = FormControl; -export const Primary: StoryObj = { +const MyForm = Form; + +export const Primary: StoryObj = { render: (props) => (
-
new Promise((resolve) => window.setTimeout(resolve, 1000))} @@ -87,17 +90,17 @@ export const Primary: StoryObj = { - +
), }; -export const InlineFormControls: StoryObj = { +export const InlineFormControls: StoryObj = { render: (props) => (
-
= { /> - +
), diff --git a/airbyte-webapp/src/components/forms/Form.tsx b/airbyte-webapp/src/components/forms/Form.tsx index e5e2b8774da..98b728c70ba 100644 --- a/airbyte-webapp/src/components/forms/Form.tsx +++ b/airbyte-webapp/src/components/forms/Form.tsx @@ -23,7 +23,7 @@ interface FormProps { */ onSubmit?: FormSubmissionHandler; onSuccess?: (values: T) => void; - onError?: (e: Error, values: T) => void; + onError?: (e: Error, values: T, methods: UseFormReturn) => void; schema: SchemaOf; defaultValues: DefaultValues; children?: ReactNode | undefined; @@ -88,7 +88,7 @@ export const Form = ({ } }) .catch((e) => { - onError?.(e, values); + onError?.(e, values, methods); }); }; diff --git a/airbyte-webapp/src/components/ui/Badge/Badge.module.scss b/airbyte-webapp/src/components/ui/Badge/Badge.module.scss index 97912ed8749..2e418a75b0a 100644 --- a/airbyte-webapp/src/components/ui/Badge/Badge.module.scss +++ b/airbyte-webapp/src/components/ui/Badge/Badge.module.scss @@ -2,6 +2,7 @@ @use "scss/variables"; .badge { + width: fit-content; font-size: variables.$font-size-xs; border-radius: variables.$border-radius-pill; text-transform: uppercase; diff --git a/airbyte-webapp/src/components/ui/BorderedTiles/BorderedTiles.module.scss b/airbyte-webapp/src/components/ui/BorderedTiles/BorderedTiles.module.scss index cbc820d0889..bcc2bb6fedc 100644 --- a/airbyte-webapp/src/components/ui/BorderedTiles/BorderedTiles.module.scss +++ b/airbyte-webapp/src/components/ui/BorderedTiles/BorderedTiles.module.scss @@ -1,48 +1,45 @@ @use "scss/colors"; @use "scss/variables"; -$min-width-horizontal: 900px; +$min-width-small: 650px; +$min-width-medium: 900px; +$min-width-extra-wide: 1100px; .borderedTiles { - display: grid; - grid-auto-columns: 1fr; - grid-template-rows: 1fr; - gap: variables.$border-thin; - background: colors.$grey-100; - border: variables.$border-thin solid colors.$grey-100; - border-radius: variables.$border-radius-md; container-type: inline-size; - &__tile { - grid-row: 1 / 1; - background: colors.$foreground; + &__grid { + background: colors.$grey-100; + border: variables.$border-thin solid colors.$grey-100; + border-radius: variables.$border-radius-md; + display: grid; + grid-template-rows: 1fr; + gap: variables.$border-thin; overflow: hidden; - padding: variables.$spacing-xl; + grid-auto-flow: column; + grid-auto-columns: 1fr; - @container (max-width: #{$min-width-horizontal}) { - grid-row: auto; + @container (max-width: #{$min-width-medium}) { + grid-auto-flow: row; } - &:first-child { - border-top-left-radius: variables.$border-radius-md; - border-bottom-left-radius: variables.$border-radius-md; + &:has(> .borderedTiles__tile:nth-child(4)) { + grid-auto-flow: unset; + grid-template-columns: repeat(4, 1fr); - @container (max-width: #{$min-width-horizontal}) { - border-top-left-radius: variables.$border-radius-md; - border-top-right-radius: variables.$border-radius-md; - border-bottom-left-radius: 0; + @container (max-width: #{$min-width-extra-wide}) { + grid-template-columns: repeat(2, 1fr); } - } - - &:last-child { - border-top-right-radius: variables.$border-radius-md; - border-bottom-right-radius: variables.$border-radius-md; - @container (max-width: #{$min-width-horizontal}) { - border-top-right-radius: 0; - border-bottom-left-radius: variables.$border-radius-md; - border-bottom-right-radius: variables.$border-radius-md; + @container (max-width: #{$min-width-small}) { + grid-template-columns: 1fr; } } } + + &__tile { + background: colors.$foreground; + overflow: hidden; + padding: 
variables.$spacing-xl; + } } diff --git a/airbyte-webapp/src/components/ui/BorderedTiles/BorderedTiles.tsx b/airbyte-webapp/src/components/ui/BorderedTiles/BorderedTiles.tsx index 2c0bc37781b..be7d32b6e36 100644 --- a/airbyte-webapp/src/components/ui/BorderedTiles/BorderedTiles.tsx +++ b/airbyte-webapp/src/components/ui/BorderedTiles/BorderedTiles.tsx @@ -4,7 +4,11 @@ import { PropsWithChildren } from "react"; import styles from "./BorderedTiles.module.scss"; export const BorderedTiles: React.FC> = ({ children, className }) => { - return
{children}
; + return ( +
+
{children}
+
+ ); }; export const BorderedTile: React.FC> = ({ children, className }) => { diff --git a/airbyte-webapp/src/components/ui/Card/Card.tsx b/airbyte-webapp/src/components/ui/Card/Card.tsx index 7d6879261bd..2f4d532415d 100644 --- a/airbyte-webapp/src/components/ui/Card/Card.tsx +++ b/airbyte-webapp/src/components/ui/Card/Card.tsx @@ -11,7 +11,7 @@ import { Heading } from "../Heading"; import { Icon } from "../Icon"; import { InfoTooltip } from "../Tooltip"; -interface CardProps { +export interface CardProps { /** * The title of the card */ diff --git a/airbyte-webapp/src/components/ui/Card/HighlightCard.module.scss b/airbyte-webapp/src/components/ui/Card/HighlightCard.module.scss new file mode 100644 index 00000000000..e947647987a --- /dev/null +++ b/airbyte-webapp/src/components/ui/Card/HighlightCard.module.scss @@ -0,0 +1,10 @@ +@use "scss/colors"; +@use "scss/variables"; + +.highlightCard__card { + border: variables.$border-thin solid colors.$blue-400; +} + +.highlightCard__cardBody { + background: colors.$blue-30; +} diff --git a/airbyte-webapp/src/components/ui/Card/HighlightCard.stories.tsx b/airbyte-webapp/src/components/ui/Card/HighlightCard.stories.tsx new file mode 100644 index 00000000000..c09d7266718 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Card/HighlightCard.stories.tsx @@ -0,0 +1,13 @@ +import { Meta, StoryFn } from "@storybook/react"; + +import { HighlightCard } from "./HighlightCard"; + +export default { + title: "Ui/HighlightCard", + component: HighlightCard, +} as Meta; + +const Template: StoryFn = (args) => Inner content; + +export const Default = Template.bind({}); +Default.args = {}; diff --git a/airbyte-webapp/src/components/ui/Card/HighlightCard.tsx b/airbyte-webapp/src/components/ui/Card/HighlightCard.tsx new file mode 100644 index 00000000000..9c8a8cd951b --- /dev/null +++ b/airbyte-webapp/src/components/ui/Card/HighlightCard.tsx @@ -0,0 +1,12 @@ +import { PropsWithChildren } from "react"; + +import { Card } from "./Card"; +import styles from "./HighlightCard.module.scss"; + +export const HighlightCard: React.FC = ({ children, ...restProps }) => { + return ( + + {children} + + ); +}; diff --git a/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.module.scss b/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.module.scss index b9f2866fce7..7b03c995bde 100644 --- a/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.module.scss +++ b/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.module.scss @@ -17,4 +17,5 @@ selection: colors.$dark-blue-50; inactiveSelection: colors.$dark-blue-40; paddingTop: variables.$spacing-lg; + jinja: colors.$blue-500; } diff --git a/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.tsx b/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.tsx index 826705806cf..2331c3b4314 100644 --- a/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.tsx +++ b/airbyte-webapp/src/components/ui/CodeEditor/CodeEditor.tsx @@ -2,24 +2,38 @@ import Editor, { Monaco, useMonaco } from "@monaco-editor/react"; import { KeyCode, KeyMod, editor } from "monaco-editor/esm/vs/editor/editor.api"; import React, { useCallback, useEffect } from "react"; +import { + JINJA_TOKEN, + NON_JINJA_TOKEN, + JINJA_STRING_TOKEN, + JINJA_OTHER_TOKEN, + JINJA_CLOSING_BRACKET_TOKEN, + JINJA_FIRST_BRACKET_TOKEN, +} from "components/connectorBuilder/Builder/jinja"; + import { useAirbyteTheme } from "hooks/theme/useAirbyteTheme"; import styles from "./CodeEditor.module.scss"; import { Spinner } from "../Spinner"; interface CodeEditorProps { + 
className?: string; value: string; + name?: string; language?: string; readOnly?: boolean; onChange?: (value: string | undefined) => void; + onBlur?: (value: string) => void; height?: string; lineNumberCharacterWidth?: number; - onMount?: (editor: editor.IStandaloneCodeEditor) => void; - automaticLayout?: boolean; + onMount?: (editor: editor.IStandaloneCodeEditor, monaco: Monaco) => void; showSuggestions?: boolean; paddingTop?: boolean; disabled?: boolean; bubbleUpUndoRedo?: boolean; + beforeMount?: (monaco: Monaco) => void; + options?: editor.IStandaloneEditorConstructionOptions; + tabFocusMode?: boolean; } function hslToHex(hue: number, saturation: number, lightness: number) { @@ -36,24 +50,31 @@ function hslToHex(hue: number, saturation: number, lightness: number) { return `#${convertWithOffset(0)}${convertWithOffset(8)}${convertWithOffset(4)}`; } -function cssCustomPropToHex(hslString: string) { +export function cssCustomPropToHex(hslString: string) { const [, h, s, l] = /^hsl\(([0-9]+), ([0-9]+)%, ([0-9]+)%\)$/.exec(hslString)?.map(Number) ?? [0, 0, 0, 0]; return hslToHex(h, s, l); } +let isTabFocusModeOn = false; + export const CodeEditor: React.FC = ({ + className, + name, value, language, readOnly, onChange, + onBlur, height, lineNumberCharacterWidth, onMount, - automaticLayout, paddingTop, showSuggestions = true, bubbleUpUndoRedo, disabled, + beforeMount, + options, + tabFocusMode, }) => { const monaco = useMonaco(); const { colorValues } = useAirbyteTheme(); @@ -74,6 +95,12 @@ export const CodeEditor: React.FC = ({ { token: "delimiter", foreground: cssCustomPropToHex(colorValues[styles.delimiter]) }, { token: "keyword", foreground: cssCustomPropToHex(colorValues[styles.keyword]) }, { token: "comment", foreground: cssCustomPropToHex(colorValues[styles.comment]) }, + { token: NON_JINJA_TOKEN, foreground: cssCustomPropToHex(colorValues[styles.string]) }, + { token: JINJA_TOKEN, foreground: cssCustomPropToHex(colorValues[styles.jinja]) }, + { token: JINJA_STRING_TOKEN, foreground: cssCustomPropToHex(colorValues[styles.jinja]) }, + { token: JINJA_OTHER_TOKEN, foreground: cssCustomPropToHex(colorValues[styles.jinja]) }, + { token: JINJA_FIRST_BRACKET_TOKEN, foreground: cssCustomPropToHex(colorValues[styles.jinja]) }, + { token: JINJA_CLOSING_BRACKET_TOKEN, foreground: cssCustomPropToHex(colorValues[styles.jinja]) }, ], colors: { "editor.background": "#00000000", // transparent, so that parent background is shown instead @@ -99,22 +126,60 @@ export const CodeEditor: React.FC = ({ return ( { - // In cases like the Builder, we have our own undo/redo framework in place, so we want to bubble up the - // undo/redo keyboard commands to the surrounding page when the user presses those keys, rather than triggering - // monaco's internal undo/redo implementation. - editor.addCommand(KeyMod.CtrlCmd | KeyCode.KeyZ, () => - bubbleUpUndoRedo ? bubbleUpUndoRedoEvent("undo", editor) : editor.trigger(undefined, "undo", undefined) - ); - editor.addCommand(KeyMod.CtrlCmd | KeyCode.KeyY, () => - bubbleUpUndoRedo ? bubbleUpUndoRedoEvent("redo", editor) : editor.trigger(undefined, "redo", undefined) - ); - editor.addCommand(KeyMod.CtrlCmd | KeyMod.Shift | KeyCode.KeyZ, () => - bubbleUpUndoRedo ? 
bubbleUpUndoRedoEvent("redo", editor) : editor.trigger(undefined, "redo", undefined) - ); - - onMount?.(editor); + className={className} + wrapperProps={{ name }} + beforeMount={(monaco: Monaco) => { + setAirbyteTheme(monaco); + beforeMount?.(monaco); + }} + onMount={(editor: editor.IStandaloneCodeEditor, monaco: Monaco) => { + editor.addAction({ + id: "ctrl-z", + label: "Undo (Ctrl + Z)", + keybindings: [KeyMod.CtrlCmd | KeyCode.KeyZ], + run: () => { + bubbleUpUndoRedo ? bubbleUpUndoRedoEvent("undo", editor) : editor.trigger(undefined, "undo", undefined); + }, + }); + + editor.addAction({ + id: "ctrl-y", + label: "Redo (Ctrl + Y)", + keybindings: [KeyMod.CtrlCmd | KeyCode.KeyY], + run: () => { + bubbleUpUndoRedo ? bubbleUpUndoRedoEvent("redo", editor) : editor.trigger(undefined, "redo", undefined); + }, + }); + + editor.addAction({ + id: "ctrl-shift-z", + label: "Redo (Ctrl + Shift + Z)", + keybindings: [KeyMod.CtrlCmd | KeyMod.Shift | KeyCode.KeyZ], + run: () => { + bubbleUpUndoRedo ? bubbleUpUndoRedoEvent("redo", editor) : editor.trigger(undefined, "redo", undefined); + }, + }); + + editor.onDidBlurEditorWidget(() => { + onBlur?.(editor.getValue()); + }); + + // Triggering editor.action.toggleTabFocusMode is the only working way to maintain the behavior + // of focusing the next element when pressing tab instead of inserting a tab character. + // Since this is only a "toggle" command, and the state defaults to false, we keep track if its + // state through a javascript variable, and toggle it accordingly when the editor is focused, + // based on tabFocusMode prop. + editor.onDidFocusEditorWidget(() => { + if (tabFocusMode && !isTabFocusModeOn) { + isTabFocusModeOn = true; + editor.trigger("", "editor.action.toggleTabFocusMode", {}); + } else if (!tabFocusMode && isTabFocusModeOn) { + isTabFocusModeOn = false; + editor.trigger("", "editor.action.toggleTabFocusMode", {}); + } + }); + + onMount?.(editor, monaco); }} loading={} value={value} @@ -125,7 +190,6 @@ export const CodeEditor: React.FC = ({ options={{ lineNumbersMinChars: lineNumberCharacterWidth ?? 2, readOnly: (readOnly || disabled) ?? 
false, - automaticLayout, matchBrackets: "always", minimap: { enabled: false, @@ -137,6 +201,7 @@ export const CodeEditor: React.FC = ({ } : {}, fixedOverflowWidgets: true, + ...options, }} /> ); diff --git a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss index e45b9d0a1e5..a345efb4dcb 100644 --- a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss +++ b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.module.scss @@ -1,7 +1,7 @@ @use "scss/variables"; @use "scss/colors"; -$icon-width: 18px; +$icon-width: 20px; .container:not(:last-child) { margin-bottom: variables.$spacing-xl; @@ -62,6 +62,7 @@ $icon-width: 18px; text-overflow: ellipsis; overflow: hidden; white-space: nowrap; + line-height: 1; } .icon { @@ -79,6 +80,7 @@ $icon-width: 18px; .body { width: 100%; + word-break: break-word; &:not(&--noPadding) { padding-left: calc($icon-width + variables.$spacing-sm); diff --git a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx index 2c405004b71..d358fe84f48 100644 --- a/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx +++ b/airbyte-webapp/src/components/ui/Collapsible/Collapsible.tsx @@ -55,7 +55,7 @@ export const Collapsible: React.FC> = onClick={() => onClick?.(!open)} > { // Stores the value that the user types in to filter the options const [query, setQuery] = useState(""); @@ -248,6 +252,7 @@ export const ComboBox = ({ immediate as="div" data-testid={testId} + className={className} > @@ -280,6 +285,7 @@ export const ComboBox = ({ onBlur={onBlur ? (e) => onBlur?.(e) : fieldInputProps?.onBlur} disabled={disabled} data-testid={testId ? `${testId}--input` : undefined} + placeholder={placeholder} /> diff --git a/airbyte-webapp/src/components/ui/Icon/Icon.tsx b/airbyte-webapp/src/components/ui/Icon/Icon.tsx index f9e04b20a53..4f770e87fee 100644 --- a/airbyte-webapp/src/components/ui/Icon/Icon.tsx +++ b/airbyte-webapp/src/components/ui/Icon/Icon.tsx @@ -80,6 +80,7 @@ import LinkIcon from "./icons/linkIcon.svg?react"; import LoadingIcon from "./icons/loadingIcon.svg?react"; import LocationIcon from "./icons/locationIcon.svg?react"; import LockIcon from "./icons/lockIcon.svg?react"; +import MappingIcon from "./icons/mappingIcon.svg?react"; import MenuIcon from "./icons/menuIcon.svg?react"; import MetricSuccessHighIcon from "./icons/metricSuccessHighIcon.svg?react"; import MetricSuccessLowIcon from "./icons/metricSuccessLowIcon.svg?react"; @@ -247,6 +248,7 @@ export const Icons: Record>> = loading: LoadingIcon, location: LocationIcon, lock: LockIcon, + mapping: MappingIcon, menu: MenuIcon, metricSuccessHigh: MetricSuccessHighIcon, metricSuccessLow: MetricSuccessLowIcon, diff --git a/airbyte-webapp/src/components/ui/Icon/icons/mappingIcon.svg b/airbyte-webapp/src/components/ui/Icon/icons/mappingIcon.svg new file mode 100644 index 00000000000..09e9a6192e4 --- /dev/null +++ b/airbyte-webapp/src/components/ui/Icon/icons/mappingIcon.svg @@ -0,0 +1,3 @@ + + + diff --git a/airbyte-webapp/src/components/ui/Icon/types.ts b/airbyte-webapp/src/components/ui/Icon/types.ts index e77e7e3aebf..ba7bf4f4f85 100644 --- a/airbyte-webapp/src/components/ui/Icon/types.ts +++ b/airbyte-webapp/src/components/ui/Icon/types.ts @@ -76,6 +76,7 @@ export type IconType = | "loading" | "location" | "lock" + | "mapping" | "menu" | "metricSuccessHigh" | "metricSuccessLow" diff --git 
a/airbyte-webapp/src/components/ui/Link/Link.module.scss b/airbyte-webapp/src/components/ui/Link/Link.module.scss index 15080e7cd72..d37d5bba4df 100644 --- a/airbyte-webapp/src/components/ui/Link/Link.module.scss +++ b/airbyte-webapp/src/components/ui/Link/Link.module.scss @@ -18,6 +18,24 @@ } } + &--button--primary { + @include mixins.base-button; + + // primary + color: colors.$white; + width: fit-content; + + &:hover { + background-color: colors.$blue-500; + } + + &:active { + background-color: colors.$blue-600; + } + + background-color: colors.$blue-400; + } + &--button { @include mixins.base-button; diff --git a/airbyte-webapp/src/components/ui/Link/Link.tsx b/airbyte-webapp/src/components/ui/Link/Link.tsx index fb94a969d49..e3b7c89a22e 100644 --- a/airbyte-webapp/src/components/ui/Link/Link.tsx +++ b/airbyte-webapp/src/components/ui/Link/Link.tsx @@ -7,7 +7,7 @@ import { getLinkClassNames } from "./getLinkClassNames"; export interface LinkProps { className?: string; opensInNewTab?: boolean; - variant?: "default" | "primary" | "button"; + variant?: "default" | "primary" | "button" | "buttonPrimary"; onClick?: ComponentProps["onClick"]; title?: string; } diff --git a/airbyte-webapp/src/components/ui/Link/getLinkClassNames.ts b/airbyte-webapp/src/components/ui/Link/getLinkClassNames.ts index 5ee56fc3f7f..26361d2a8f2 100644 --- a/airbyte-webapp/src/components/ui/Link/getLinkClassNames.ts +++ b/airbyte-webapp/src/components/ui/Link/getLinkClassNames.ts @@ -10,7 +10,11 @@ type GetClassNamesArgs = Pick & { export const getLinkClassNames = ({ className, variant }: GetClassNamesArgs) => { return classNames( styles.link, - { [styles["link--primary"]]: variant === "primary", [styles["link--button"]]: variant === "button" }, + { + [styles["link--primary"]]: variant === "primary", + [styles["link--button"]]: variant === "button", + [styles["link--button--primary"]]: variant === "buttonPrimary", + }, className ); }; diff --git a/airbyte-webapp/src/components/ui/ListBox/InlineListBox.module.scss b/airbyte-webapp/src/components/ui/ListBox/InlineListBox.module.scss deleted file mode 100644 index 6bcc71bad7e..00000000000 --- a/airbyte-webapp/src/components/ui/ListBox/InlineListBox.module.scss +++ /dev/null @@ -1,11 +0,0 @@ -.controlButton { - all: unset; -} - -.button.button { - padding-left: unset; -} - -.option { - white-space: nowrap; -} diff --git a/airbyte-webapp/src/components/ui/ListBox/InlineListBox.tsx b/airbyte-webapp/src/components/ui/ListBox/InlineListBox.tsx deleted file mode 100644 index 8391be68f7a..00000000000 --- a/airbyte-webapp/src/components/ui/ListBox/InlineListBox.tsx +++ /dev/null @@ -1,36 +0,0 @@ -import React from "react"; -import { useIntl } from "react-intl"; - -import { Text } from "components/ui/Text"; - -import styles from "./InlineListBox.module.scss"; -import { ListBox, ListBoxControlButtonProps, ListBoxProps } from "./ListBox"; -import { Button } from "../Button"; - -type InlineListBoxProps = Omit, "controlButton" | "buttonClassName" | "optionClassName">; - -export const InlineListBox = ({ ...rest }: InlineListBoxProps) => { - const { formatMessage } = useIntl(); - - const ControlButton: React.FC = ({ selectedOption, isDisabled }: ListBoxControlButtonProps) => ( - - ); - - return ( - - {...(rest as ListBoxProps)} - controlButton={ControlButton} - buttonClassName={styles.controlButton} - optionClassName={styles.option} - /> - ); -}; diff --git a/airbyte-webapp/src/components/ui/ListBox/ListBox.tsx b/airbyte-webapp/src/components/ui/ListBox/ListBox.tsx index 
f9e686877ff..7327d9a7c4e 100644 --- a/airbyte-webapp/src/components/ui/ListBox/ListBox.tsx +++ b/airbyte-webapp/src/components/ui/ListBox/ListBox.tsx @@ -21,9 +21,10 @@ import { Icon } from "../Icon"; export interface ListBoxControlButtonProps { selectedOption?: Option; isDisabled?: boolean; + placeholder?: string; } -const DefaultControlButton = ({ selectedOption, isDisabled }: ListBoxControlButtonProps) => { +const DefaultControlButton = ({ placeholder, selectedOption, isDisabled }: ListBoxControlButtonProps) => { const { formatMessage } = useIntl(); return ( @@ -41,7 +42,7 @@ const DefaultControlButton = ({ selectedOption, isDisabled }: ListBoxControl ) : ( - {formatMessage({ id: "form.selectValue" })} + {placeholder ?? formatMessage({ id: "form.selectValue" })} )} @@ -70,9 +71,18 @@ export interface ListBoxProps { buttonClassName?: string; id?: string; isDisabled?: boolean; + /** + * Custom button content for the OriginalListboxButton. + * This prop allows you to provide custom content to be used inside the control button for the ListBox. + */ controlButton?: React.ComponentType>; - "data-testid"?: string; + /** + * Custom element type for the OriginalListboxButton. + * This prop allows you to replace the original ListBox control button with a custom element type. + */ + controlButtonAs?: ComponentPropsWithoutRef["as"]; hasError?: boolean; + placeholder?: string; /** * Floating menu placement */ @@ -94,6 +104,7 @@ export interface ListBoxProps { */ footerOption?: React.ReactNode; onFocus?: () => void; + "data-testid"?: string; } export const MIN_OPTIONS_FOR_VIRTUALIZATION = 30; @@ -104,7 +115,12 @@ export const ListBox = ({ selectedValue, onSelect, buttonClassName, + /** + * TODO: this is not an actual button, just button content + * issue_link: https://github.com/airbytehq/airbyte-internal-issues/issues/11011 + */ controlButton: ControlButton = DefaultControlButton, + controlButtonAs, optionsMenuClassName, optionClassName, optionTextAs, @@ -203,6 +219,10 @@ export const ListBox = ({ })} > + {/** + * TODO: extract(or reuse?) 
Float component as we did in @MultiCatalogComboBox + * issue_link: https://github.com/airbytehq/airbyte-internal-issues/issues/11011 + */} ({ }} > e.stopPropagation()} + onClick={(e: React.MouseEvent) => e.stopPropagation()} {...(testId && { "data-testid": `${testId}-listbox-button`, })} id={id} + as={controlButtonAs} onFocus={onFocus} > diff --git a/airbyte-webapp/src/components/ui/ScrollParent/ScrollParent.module.scss b/airbyte-webapp/src/components/ui/ScrollParent/ScrollParent.module.scss index 0b7528c1d50..d7f0cb3003a 100644 --- a/airbyte-webapp/src/components/ui/ScrollParent/ScrollParent.module.scss +++ b/airbyte-webapp/src/components/ui/ScrollParent/ScrollParent.module.scss @@ -1,3 +1,7 @@ +@use "scss/variables"; + .container { + height: 100%; overflow-y: auto; + padding: variables.$spacing-xl; } diff --git a/airbyte-webapp/src/core/api/hooks/billing.ts b/airbyte-webapp/src/core/api/hooks/billing.ts index e86901b60b4..1a1a43929e1 100644 --- a/airbyte-webapp/src/core/api/hooks/billing.ts +++ b/airbyte-webapp/src/core/api/hooks/billing.ts @@ -2,7 +2,7 @@ import { useMutation, useQuery } from "@tanstack/react-query"; import { getCustomerPortalLink, - getOrganizationBalance, + getSubscriptionInfo, getPaymentInformation, listPastInvoices, } from "../generated/AirbyteClient"; @@ -12,7 +12,7 @@ import { useRequestOptions } from "../useRequestOptions"; export const billingKeys = { all: [SCOPE_ORGANIZATION, "billing"] as const, - upcomingInvoice: (organizationId: string) => [...billingKeys.all, "upcomingInvoice", organizationId] as const, + subscriptionInfo: (organizationId: string) => [...billingKeys.all, "subscriptionInfo", organizationId] as const, invoices: (organizationId: string) => [...billingKeys.all, "invoices", organizationId] as const, paymentMethod: (organizationId: string) => [...billingKeys.all, "paymentMethod", organizationId] as const, }; @@ -47,10 +47,10 @@ export const useGetPaymentInformation = (organizationId: string) => { ); }; -export const useGetOrganizationBillingBalance = (organizationId: string) => { +export const useGetOrganizationSubscriptionInfo = (organizationId: string) => { const requestOptions = useRequestOptions(); - return useQuery(billingKeys.upcomingInvoice(organizationId), () => - getOrganizationBalance({ organizationId }, requestOptions) + return useQuery(billingKeys.subscriptionInfo(organizationId), () => + getSubscriptionInfo({ organizationId }, requestOptions) ); }; diff --git a/airbyte-webapp/src/core/api/hooks/cloud/cloudWorkspaces.ts b/airbyte-webapp/src/core/api/hooks/cloud/cloudWorkspaces.ts index 25179035444..2435448bac6 100644 --- a/airbyte-webapp/src/core/api/hooks/cloud/cloudWorkspaces.ts +++ b/airbyte-webapp/src/core/api/hooks/cloud/cloudWorkspaces.ts @@ -1,23 +1,15 @@ -import { useInfiniteQuery, useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; -import { useCallback } from "react"; +import { useInfiniteQuery, useMutation, useQueryClient } from "@tanstack/react-query"; import { useCurrentUser } from "core/services/auth"; import { deleteCloudWorkspace, - getCloudWorkspace, - getCloudWorkspaceUsage, updateCloudWorkspace, webBackendCreatePermissionedCloudWorkspace, webBackendListWorkspacesByUserPaginated, } from "../../generated/CloudApi"; import { SCOPE_USER } from "../../scopes"; -import { - CloudWorkspaceRead, - CloudWorkspaceReadList, - ConsumptionTimeWindow, - PermissionedCloudWorkspaceCreate, -} from "../../types/CloudApi"; +import { CloudWorkspaceRead, CloudWorkspaceReadList, PermissionedCloudWorkspaceCreate } from 
"../../types/CloudApi"; import { useRequestOptions } from "../../useRequestOptions"; import { useSuspenseQuery } from "../../useSuspenseQuery"; import { useListPermissions } from "../permissions"; @@ -27,8 +19,6 @@ export const workspaceKeys = { all: [SCOPE_USER, "cloud_workspaces"] as const, lists: () => [...workspaceKeys.all, "list"] as const, list: (filters: string | Record) => [...workspaceKeys.lists(), { filters }] as const, - detail: (id: number | string) => [...workspaceKeys.all, "detail", id] as const, - usage: (id: number | string, timeWindow: string) => [...workspaceKeys.all, id, timeWindow, "usage"] as const, }; type CloudWorkspaceCount = { count: "zero" } | { count: "one"; workspace: CloudWorkspaceRead } | { count: "multiple" }; @@ -118,13 +108,6 @@ export function useUpdateCloudWorkspace() { return { workspaces: [...list.slice(0, index), result, ...list.slice(index + 1)] }; }); - - queryClient.setQueryData(workspaceKeys.detail(result.workspaceId), (old) => { - return { - ...old, - ...result, - }; - }); }, } ); @@ -143,47 +126,6 @@ export function useRemoveCloudWorkspace() { }); } -export function getCloudWorkspaceQueryKey(workspaceId: string) { - return workspaceKeys.detail(workspaceId); -} - -export function useGetCloudWorkspaceQuery(workspaceId: string) { - const requestOptions = useRequestOptions(); - - return () => getCloudWorkspace({ workspaceId }, requestOptions); -} - -export function useGetCloudWorkspace(workspaceId: string) { - const queryKey = getCloudWorkspaceQueryKey(workspaceId); - const queryFn = useGetCloudWorkspaceQuery(workspaceId); - - return useSuspenseQuery(queryKey, queryFn); -} - -export function useGetCloudWorkspaceAsync(workspaceId: string) { - const queryKey = getCloudWorkspaceQueryKey(workspaceId); - const queryFn = useGetCloudWorkspaceQuery(workspaceId); - - return useQuery(queryKey, queryFn).data; -} - -export function useInvalidateCloudWorkspace(workspaceId: string): () => Promise { - const queryClient = useQueryClient(); - - return useCallback( - () => queryClient.invalidateQueries(workspaceKeys.detail(workspaceId)), - [queryClient, workspaceId] - ); -} - -export function useGetCloudWorkspaceUsage(workspaceId: string, timeWindow: ConsumptionTimeWindow) { - const requestOptions = useRequestOptions(); - - return useSuspenseQuery(workspaceKeys.usage(workspaceId, timeWindow), () => - getCloudWorkspaceUsage({ workspaceId, timeWindow }, requestOptions) - ); -} - /** * Checks whether a user is in a foreign workspace. A foreign workspace is any workspace the user doesn't * have explicit permissions to via workspace permissions or being part of the organization the workspace is in. 
diff --git a/airbyte-webapp/src/core/api/hooks/cloud/index.ts b/airbyte-webapp/src/core/api/hooks/cloud/index.ts index 651140d584a..6102d118a1d 100644 --- a/airbyte-webapp/src/core/api/hooks/cloud/index.ts +++ b/airbyte-webapp/src/core/api/hooks/cloud/index.ts @@ -1,6 +1,5 @@ export * from "./cloudWorkspaces"; export * from "./dbtCloud"; -export * from "./stripe"; export * from "./usePrefetchCloudWorkspaceData"; export * from "./users"; export * from "./useGetWorkspaceUsage"; diff --git a/airbyte-webapp/src/core/api/hooks/cloud/stripe.ts b/airbyte-webapp/src/core/api/hooks/cloud/stripe.ts deleted file mode 100644 index c420d8b7282..00000000000 --- a/airbyte-webapp/src/core/api/hooks/cloud/stripe.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { useMutation } from "@tanstack/react-query"; - -import { createStripeCheckoutSession } from "core/api/generated/CloudApi"; -import { StripeCheckoutSessionCreate } from "core/api/types/CloudApi"; -import { useRequestOptions } from "core/api/useRequestOptions"; - -export const useStripeCheckout = () => { - const requestOptions = useRequestOptions(); - return useMutation((params: StripeCheckoutSessionCreate) => createStripeCheckoutSession(params, requestOptions)); -}; diff --git a/airbyte-webapp/src/core/api/hooks/connections.tsx b/airbyte-webapp/src/core/api/hooks/connections.tsx index f85c14a2471..e3e69fefa72 100644 --- a/airbyte-webapp/src/core/api/hooks/connections.tsx +++ b/airbyte-webapp/src/core/api/hooks/connections.tsx @@ -12,7 +12,7 @@ import { getFrequencyFromScheduleData, useAnalyticsService, Action, Namespace } import { trackError } from "core/utils/datadog"; import { links } from "core/utils/links"; import { useNotificationService } from "hooks/services/Notification"; -import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; +import { CloudSettingsRoutePaths } from "packages/cloud/views/settings/routePaths"; import { RoutePaths } from "pages/routePaths"; import { useCurrentWorkspace, useInvalidateWorkspaceStateQuery } from "./workspaces"; @@ -416,7 +416,7 @@ export const useUpdateConnection = () => { type: "error", text: , actionBtnText: , - onAction: () => navigate(`/${RoutePaths.Workspaces}/${workspaceId}/${CloudRoutes.Billing}`), + onAction: () => navigate(`/${RoutePaths.Workspaces}/${workspaceId}/${CloudSettingsRoutePaths.Billing}`), }); } diff --git a/airbyte-webapp/src/core/api/hooks/connectorBuilderProject.ts b/airbyte-webapp/src/core/api/hooks/connectorBuilderProject.ts index ee12f31d9d5..f288f4a7152 100644 --- a/airbyte-webapp/src/core/api/hooks/connectorBuilderProject.ts +++ b/airbyte-webapp/src/core/api/hooks/connectorBuilderProject.ts @@ -466,20 +466,20 @@ export const useChangeBuilderProjectVersion = () => { export const useBuilderProjectReadStream = ( params: ConnectorBuilderProjectStreamReadRequestBody, - testStream: DeclarativeStream, + testStream: DeclarativeStream | undefined, onSuccess: (data: StreamReadTransformedSlices) => void ) => { const requestOptions = useRequestOptions(); return useQuery( connectorBuilderProjectsKeys.read(params.builderProjectId, params.streamName), - () => - readConnectorBuilderProjectStream(params, requestOptions).then((streamRead) => - transformSlices(streamRead, testStream) - ), + async () => { + const streamRead = await readConnectorBuilderProjectStream(params, requestOptions); + return transformSlices(streamRead, testStream!); + }, { refetchOnWindowFocus: false, - enabled: false, + enabled: !!testStream, onSuccess, } ); diff --git a/airbyte-webapp/src/core/api/hooks/health.ts 
b/airbyte-webapp/src/core/api/hooks/health.ts index 228bd28effe..b2aeb9b5798 100644 --- a/airbyte-webapp/src/core/api/hooks/health.ts +++ b/airbyte-webapp/src/core/api/hooks/health.ts @@ -1,9 +1,23 @@ -import { useMutation } from "@tanstack/react-query"; +import { useQuery } from "@tanstack/react-query"; import { getHealthCheck } from "../generated/AirbyteClient"; import { useRequestOptions } from "../useRequestOptions"; -export const useHealthCheck = () => { +const HEALTHCHECK_MAX_COUNT = 3; +const HEALTHCHECK_INTERVAL = 20000; + +export const useHealthCheck = (onError: () => void, onSuccess: () => void) => { const requestOptions = useRequestOptions(); - return useMutation(() => getHealthCheck(requestOptions)).mutateAsync; + + const { failureCount } = useQuery(["healthCheck"], () => getHealthCheck(requestOptions), { + refetchInterval: HEALTHCHECK_INTERVAL, + retry: HEALTHCHECK_MAX_COUNT, + retryDelay: HEALTHCHECK_INTERVAL, + onError: () => { + if (failureCount >= HEALTHCHECK_MAX_COUNT) { + onError(); + } + }, + onSuccess, + }); }; diff --git a/airbyte-webapp/src/core/api/hooks/jobs.ts b/airbyte-webapp/src/core/api/hooks/jobs.ts index 532f5c86175..da6ccc6596d 100644 --- a/airbyte-webapp/src/core/api/hooks/jobs.ts +++ b/airbyte-webapp/src/core/api/hooks/jobs.ts @@ -1,5 +1,13 @@ -import { UseQueryOptions, useIsMutating, useMutation, useQuery } from "@tanstack/react-query"; +import { useIsMutating, useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { useCallback } from "react"; +import { useIntl } from "react-intl"; +import { formatLogEvent } from "area/connection/components/JobHistoryItem/useCleanLogs"; +import { trackError } from "core/utils/datadog"; +import { FILE_TYPE_DOWNLOAD, downloadFile, fileizeString } from "core/utils/file"; +import { useNotificationService } from "hooks/services/Notification"; + +import { useCurrentWorkspace } from "./workspaces"; import { cancelJob, getAttemptCombinedStats, @@ -8,7 +16,7 @@ import { getJobInfoWithoutLogs, } from "../generated/AirbyteClient"; import { SCOPE_WORKSPACE } from "../scopes"; -import { AttemptInfoRead, AttemptStats, LogEvents, LogRead } from "../types/AirbyteClient"; +import { AttemptInfoRead, AttemptRead, LogEvents, LogRead } from "../types/AirbyteClient"; import { useRequestOptions } from "../useRequestOptions"; import { useSuspenseQuery } from "../useSuspenseQuery"; @@ -16,14 +24,6 @@ export const jobsKeys = { all: (connectionId: string | undefined) => [SCOPE_WORKSPACE, connectionId] as const, }; -// A disabled useQuery that can be called manually to download job logs -export const useGetDebugInfoJobManual = (id: number) => { - const requestOptions = useRequestOptions(); - return useQuery([SCOPE_WORKSPACE, "jobs", "getDebugInfo", id], () => getJobDebugInfo({ id }, requestOptions), { - enabled: false, - }); -}; - export const useCancelJob = () => { const requestOptions = useRequestOptions(); const mutation = useMutation(["useCancelJob"], (id: number) => cancelJob({ id }, requestOptions)); @@ -54,7 +54,7 @@ export const useJobInfoWithoutLogs = (id: number) => { type AttemptInfoReadWithFormattedLogs = AttemptInfoRead & { logType: "formatted"; logs: LogRead }; type AttemptInfoReadWithStructuredLogs = AttemptInfoRead & { logType: "structured"; logs: LogEvents }; -type AttemptInfoReadWithLogs = AttemptInfoReadWithFormattedLogs | AttemptInfoReadWithStructuredLogs; +export type AttemptInfoReadWithLogs = AttemptInfoReadWithFormattedLogs | AttemptInfoReadWithStructuredLogs; export function 
attemptHasFormattedLogs(attempt: AttemptInfoRead): attempt is AttemptInfoReadWithFormattedLogs { return attempt.logType === "formatted"; @@ -66,7 +66,7 @@ export function attemptHasStructuredLogs(attempt: AttemptInfoRead): attempt is A export const useAttemptForJob = (jobId: number, attemptNumber: number) => { const requestOptions = useRequestOptions(); - return useSuspenseQuery( + return useQuery( [SCOPE_WORKSPACE, "jobs", "attemptForJob", jobId, attemptNumber], () => getAttemptForJob({ jobId, attemptNumber }, requestOptions) as Promise, { @@ -83,20 +83,96 @@ export const useAttemptForJob = (jobId: number, attemptNumber: number) => { ); }; -export const useAttemptCombinedStatsForJob = ( - jobId: number, - attemptNumber: number, - options?: Readonly, "queryKey" | "queryFn" | "suspense">> -) => { +export const useAttemptCombinedStatsForJob = (jobId: number, attempt: AttemptRead) => { + const requestOptions = useRequestOptions(); + return useQuery( + [SCOPE_WORKSPACE, "jobs", "attemptCombinedStatsForJob", jobId, attempt.id], + () => getAttemptCombinedStats({ jobId, attemptNumber: attempt.id }, requestOptions), + { + refetchInterval: () => { + // if the attempt hasn't ended refetch every 2.5 seconds + return attempt.endedAt ? false : 2500; + }, + } + ); +}; + +export const useDonwnloadJobLogsFetchQuery = () => { const requestOptions = useRequestOptions(); - // the endpoint returns a 404 if there aren't stats for this attempt - try { - return useSuspenseQuery( - [SCOPE_WORKSPACE, "jobs", "attemptCombinedStatsForJob", jobId, attemptNumber], - () => getAttemptCombinedStats({ jobId, attemptNumber }, requestOptions), - options - ); - } catch (e) { - return undefined; - } + const queryClient = useQueryClient(); + const { registerNotification, unregisterNotificationById } = useNotificationService(); + const workspace = useCurrentWorkspace(); + const { formatMessage } = useIntl(); + + return useCallback( + (connectionName: string, jobId: number) => { + // Promise.all() with a timeout is used to ensure that the notification is shown to the user for at least 1 second + queryClient.fetchQuery({ + queryKey: [SCOPE_WORKSPACE, "jobs", "getDebugInfo", jobId], + queryFn: async () => { + const notificationId = `download-logs-${jobId}`; + registerNotification({ + type: "info", + text: formatMessage( + { + id: "jobHistory.logs.logDownloadPending", + }, + { jobId } + ), + id: notificationId, + timeout: false, + }); + try { + return await Promise.all([ + getJobDebugInfo({ id: jobId }, requestOptions) + .then((data) => { + if (!data) { + throw new Error("No logs returned from server"); + } + const file = new Blob( + [ + data.attempts + .flatMap((info, index) => [ + `>> ATTEMPT ${index + 1}/${data.attempts.length}\n`, + ...(attemptHasFormattedLogs(info) ? info.logs.logLines : []), + ...(attemptHasStructuredLogs(info) + ? 
info.logs.events.map((event) => formatLogEvent(event)) + : []), + `\n\n\n`, + ]) + .join("\n"), + ], + { + type: FILE_TYPE_DOWNLOAD, + } + ); + downloadFile(file, fileizeString(`${connectionName}-logs-${jobId}.txt`)); + }) + .catch((e) => { + trackError(e, { workspaceId: workspace.workspaceId, jobId }); + registerNotification({ + type: "error", + text: formatMessage({ + id: "jobHistory.logs.logDownloadFailed", + }), + id: `download-logs-error-${jobId}`, + }); + }), + new Promise((resolve) => setTimeout(resolve, 1000)), + ]); + } finally { + unregisterNotificationById(notificationId); + } + }, + }); + }, + [ + formatMessage, + queryClient, + registerNotification, + requestOptions, + unregisterNotificationById, + workspace.workspaceId, + ] + ); }; diff --git a/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx b/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx index 38932adb20a..a11520f62ea 100644 --- a/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx +++ b/airbyte-webapp/src/core/services/analytics/pageTrackingCodes.tsx @@ -23,15 +23,17 @@ export enum PageTrackingCodes { CONNECTIONS_ITEM_TIMELINE = "Connections.Item.Timeline", SETTINGS_ACCOUNT = "Settings.Account", SETTINGS_WORKSPACE = "Settings.Workspace", + SETTINGS_WORKSPACE_USAGE = "Settings.WorkspaceUsage", SETTINGS_ORGANIZATION = "Settings.Organization", SETTINGS_ORGANIZATIONMEMBERS = "Settings.OrganizationMembers", + SETTINGS_ORGANIZATION_BILLING = "Settings.OrganizationBilling", + SETTINGS_ORGANIZATION_USAGE = "Settings.OrganizationUsage", SETTINGS_DESTINATION = "Settings.Destination", SETTINGS_SOURCE = "Settings.Source", SETTINGS_CONFIGURATION = "Settings.Configuration", SETTINGS_NOTIFICATION = "Settings.Notifications", SETTINGS_METRICS = "Settings.Metrics", SETTINGS_DATA_RESIDENCY = "Settings.DataResidency", - CREDITS = "Credits", WORKSPACES = "Workspaces", PREFERENCES = "Preferences", } diff --git a/airbyte-webapp/src/core/services/analytics/types.ts b/airbyte-webapp/src/core/services/analytics/types.ts index b0ae99a50b6..f905ec3080d 100644 --- a/airbyte-webapp/src/core/services/analytics/types.ts +++ b/airbyte-webapp/src/core/services/analytics/types.ts @@ -5,7 +5,6 @@ export const enum Namespace { CONNECTOR = "Connector", ONBOARDING = "Onboarding", USER = "User", - CREDITS = "Credits", CONNECTOR_BUILDER = "ConnectorBuilder", SCHEMA = "Schema", ERD = "ERD", @@ -33,7 +32,6 @@ export const enum Action { PREFERENCES = "Preferences", NO_MATCHING_CONNECTOR = "NoMatchingConnector", SELECTION_OPENED = "SelectionOpened", - CHECKOUT_START = "CheckoutStart", INVITE = "Invite", OAUTH_ATTEMPT = "OAuthAttempt", OAUTH_SUCCESS = "OAuthSuccess", diff --git a/airbyte-webapp/src/core/services/features/FeatureService.tsx b/airbyte-webapp/src/core/services/features/FeatureService.tsx index 6995a3cb896..d283bf8876a 100644 --- a/airbyte-webapp/src/core/services/features/FeatureService.tsx +++ b/airbyte-webapp/src/core/services/features/FeatureService.tsx @@ -116,3 +116,11 @@ export const IfFeatureEnabled: React.FC{children} : null; }; + +export const IfFeatureDisabled: React.FC> = ({ + feature, + children, +}) => { + const hasFeature = useFeature(feature); + return !hasFeature ? 
<>{children} : null; +}; diff --git a/airbyte-webapp/src/core/services/features/constants.ts b/airbyte-webapp/src/core/services/features/constants.ts index 4773846a531..551c172a3ac 100644 --- a/airbyte-webapp/src/core/services/features/constants.ts +++ b/airbyte-webapp/src/core/services/features/constants.ts @@ -4,7 +4,7 @@ export const defaultOssFeatures = [ FeatureItem.AllowAutoDetectSchema, FeatureItem.AllowUpdateConnectors, FeatureItem.AllowUploadCustomImage, - FeatureItem.AllowSyncSubOneHourCronExpressions, + FeatureItem.EnterpriseUpsell, ]; export const defaultEnterpriseFeatures = [ @@ -17,6 +17,7 @@ export const defaultEnterpriseFeatures = [ FeatureItem.EnterpriseLicenseChecking, FeatureItem.FieldHashing, FeatureItem.IndicateGuestUsers, + FeatureItem.MappingsUI, // Also governed by connection.mappingsUI experiment. This flag indicates whether the user has the right level of product. The experiment indicates whether the UI is "on" overall. FeatureItem.MultiWorkspaceUI, FeatureItem.RBAC, ]; @@ -26,7 +27,7 @@ export const defaultCloudFeatures = [ FeatureItem.AllowOAuthConnector, FeatureItem.AllowChangeDataGeographies, FeatureItem.AllowDBTCloudIntegration, - FeatureItem.Billing, + FeatureItem.CloudForTeamsUpsell, FeatureItem.ConnectionHistoryGraphs, FeatureItem.ConnectorBreakingChangeDeadlines, FeatureItem.EmailNotifications, @@ -35,5 +36,5 @@ export const defaultCloudFeatures = [ FeatureItem.RBAC, FeatureItem.RestrictAdminInForeignWorkspace, FeatureItem.ShowInviteUsersHint, - FeatureItem.FieldHashing, // also goverened by connection.hashingUI experiment + FeatureItem.FieldHashing, // also governed by connection.hashingUI experiment ]; diff --git a/airbyte-webapp/src/core/services/features/types.tsx b/airbyte-webapp/src/core/services/features/types.tsx index b0b485e9575..39bbadbc4c1 100644 --- a/airbyte-webapp/src/core/services/features/types.tsx +++ b/airbyte-webapp/src/core/services/features/types.tsx @@ -11,19 +11,20 @@ export enum FeatureItem { AllowUpdateConnectors = "ALLOW_UPDATE_CONNECTORS", AllowOAuthConnector = "ALLOW_OAUTH_CONNECTOR", AllowChangeDataGeographies = "ALLOW_CHANGE_DATA_GEOGRAPHIES", - AllowSyncSubOneHourCronExpressions = "ALLOW_SYNC_SUB_ONE_HOUR_CRON_EXPRESSIONS", - Billing = "BILLING", CloudForTeamsBranding = "CLOUD_FOR_TEAMS_BRANDING", + CloudForTeamsUpsell = "CLOUD_FOR_TEAMS_UPSELLING", ConnectionHistoryGraphs = "CONNECTION_HISTORY_GRAPHS", ConnectorBreakingChangeDeadlines = "CONNECTOR_BREAKING_CHANGE_DEADLINES", DiagnosticsExport = "DIAGNOSTICS_EXPORT", DisplayOrganizationUsers = "DISPLAY_ORGANIZATION_USERS", EmailNotifications = "EMAIL_NOTIFICATIONS", EnterpriseBranding = "ENTERPRISE_BRANDING", + EnterpriseUpsell = "ENTERPRISE_UPSELL", EnterpriseLicenseChecking = "ENTERPRISE_LICENSE_CHECKING", ExternalInvitations = "EXTERNAL_INVITATIONS", FieldHashing = "FIELD_HASHING", IndicateGuestUsers = "INDICATE_GUEST_USERS", + MappingsUI = "MAPPINGS_UI", MultiWorkspaceUI = "MULTI_WORKSPACE_UI", RBAC = "RBAC", RestrictAdminInForeignWorkspace = "RESTRICT_ADMIN_IN_FOREIGN_WORKSPACE", diff --git a/airbyte-webapp/src/core/utils/links.ts b/airbyte-webapp/src/core/utils/links.ts index 90fe7b35afe..fb19730ea9c 100644 --- a/airbyte-webapp/src/core/utils/links.ts +++ b/airbyte-webapp/src/core/utils/links.ts @@ -35,7 +35,6 @@ export const links = { connectionDataResidency: `${BASE_DOCS_LINK}/cloud/managing-airbyte-cloud/manage-data-residency#choose-the-data-residency-for-a-connection`, lowCodeYamlDescription: 
`${BASE_DOCS_LINK}/connector-development/config-based/understanding-the-yaml-file/yaml-overview`, upcomingFeaturesPage: "https://embedded.airbyte.com/upcoming-releases", - stripeCustomerPortal: "https://billing.stripe.com/p/login/5kAbJs2Gy83H2YwdQQ", iso8601Duration: "https://en.wikipedia.org/wiki/ISO_8601#Durations", ossSecurityDocs: `${BASE_DOCS_LINK}/operator-guides/security/#securing-airbyte-open-source`, connectorBuilderAuthentication: `${BASE_DOCS_LINK}/connector-development/connector-builder-ui/authentication`, @@ -54,14 +53,13 @@ export const links = { pricingPage: "https://airbyte.com/pricing", usingCustomConnectors: `${BASE_DOCS_LINK}/operator-guides/using-custom-connectors/`, gettingSupport: `${BASE_DOCS_LINK}/community/getting-support`, - autoRechargeEnrollment: `${BASE_DOCS_LINK}/cloud/managing-airbyte-cloud/manage-credits#automatic-reload-of-credits-beta`, connectorSpecificationDocs: `${BASE_DOCS_LINK}/connector-development/connector-specification-reference/#airbyte-modifications-to-jsonschema`, schemaChangeManagement: `${BASE_DOCS_LINK}/using-airbyte/schema-change-management`, apiAccess: `${BASE_DOCS_LINK}/using-airbyte/configuring-api-access`, deployingViaHttp: `${BASE_DOCS_LINK}/using-airbyte/getting-started/oss-quickstart#running-over-http`, ossAuthentication: `${BASE_DOCS_LINK}/deploying-airbyte/integrations/authentication`, - enterpriseTalkToSales: - "https://airbyte.com/company/talk-to-sales?utm_source=airbyte&utm_medium=product&utm_content=enterprise-connector", + featureTalkToSales: + "https://airbyte.com/company/talk-to-sales?utm_source=airbyte&utm_medium=product&utm_content=feature-{feature}", billingNotificationsForm: "https://airbyte.retool.com/form/f06009f2-aad6-4df4-bb54-41f3b17d50d2?orgId={organizationId}", } as const; diff --git a/airbyte-webapp/src/core/utils/rbac/intents.ts b/airbyte-webapp/src/core/utils/rbac/intents.ts index d31a2e48ae7..2afae86e467 100644 --- a/airbyte-webapp/src/core/utils/rbac/intents.ts +++ b/airbyte-webapp/src/core/utils/rbac/intents.ts @@ -10,7 +10,6 @@ export const intentToRbacQuery = { ViewLicenseDetails: { resourceType: "WORKSPACE", role: "READER" }, // workspace - BuyCredits: { resourceType: "WORKSPACE", role: "ADMIN" }, DeleteWorkspace: { resourceType: "WORKSPACE", role: "ADMIN" }, DownloadDiagnostics: { resourceType: "WORKSPACE", role: "READER" }, UpdateWorkspace: [ diff --git a/airbyte-webapp/src/hooks/services/Experiment/experiments.ts b/airbyte-webapp/src/hooks/services/Experiment/experiments.ts index 55649fa421f..f8cbc8680ea 100644 --- a/airbyte-webapp/src/hooks/services/Experiment/experiments.ts +++ b/airbyte-webapp/src/hooks/services/Experiment/experiments.ts @@ -9,10 +9,9 @@ export interface Experiments { "authPage.rightSideUrl": string | undefined; "billing.early-sync-enabled": boolean; - "billing.autoRecharge": boolean; - "billing.migrationMaintenance": boolean; "connection.columnSelection": boolean; "connection.hashingUI": boolean; + "connection.mappingsUI": boolean; "connection.onboarding.destinations": string; "connection.onboarding.sources": string; "connection.rateLimitedUI": boolean; @@ -21,7 +20,6 @@ export interface Experiments { "connector.suggestedDestinationConnectors": string; "connectorBuilder.aiAssist.enabled": boolean; "connectorBuilder.contributeEditsToMarketplace": boolean; - "logs.structured-logs-ui": boolean; "settings.breakingChangeNotifications": boolean; "settings.downloadDiagnostics": boolean; "settings.organizationRbacImprovements": boolean; @@ -31,11 +29,10 @@ export interface Experiments { 
export const defaultExperimentValues: Experiments = { "authPage.rightSideUrl": undefined, - "billing.autoRecharge": false, "billing.early-sync-enabled": false, - "billing.migrationMaintenance": false, "connection.columnSelection": true, "connection.hashingUI": true, // also requires FeatureItem.FieldHashing + "connection.mappingsUI": false, "connection.onboarding.destinations": "", "connection.onboarding.sources": "", "connection.rateLimitedUI": false, @@ -45,7 +42,6 @@ export const defaultExperimentValues: Experiments = { "connector.suggestedSourceConnectors": "", "connectorBuilder.aiAssist.enabled": false, "connectorBuilder.contributeEditsToMarketplace": true, - "logs.structured-logs-ui": false, "settings.breakingChangeNotifications": false, "settings.downloadDiagnostics": false, "settings.organizationRbacImprovements": false, diff --git a/airbyte-webapp/src/hooks/services/Health/HealthPollService.tsx b/airbyte-webapp/src/hooks/services/Health/HealthPollService.tsx index 5cf17a2677f..c3d9a556ebf 100644 --- a/airbyte-webapp/src/hooks/services/Health/HealthPollService.tsx +++ b/airbyte-webapp/src/hooks/services/Health/HealthPollService.tsx @@ -1,4 +1,3 @@ -import { useEffect, useState } from "react"; import { useIntl } from "react-intl"; import { useHealthCheck } from "core/api"; @@ -7,40 +6,19 @@ import { useNotificationService } from "hooks/services/Notification/Notification import { Notification } from "../Notification"; const HEALTH_NOTIFICATION_ID = "health.error"; -const HEALTHCHECK_MAX_COUNT = 3; -const HEALTHCHECK_INTERVAL = 20000; -function useApiHealthPoll(): void { - const [count, setCount] = useState(0); +export const useApiHealthPoll = (): void => { const { formatMessage } = useIntl(); - const healthCheck = useHealthCheck(); const { registerNotification, unregisterNotificationById } = useNotificationService(); - useEffect(() => { - const errorNotification: Notification = { - id: HEALTH_NOTIFICATION_ID, - text: formatMessage({ id: "notifications.error.health" }), - type: "error", - }; - - const interval = setInterval(async () => { - try { - await healthCheck(); - if (count >= HEALTHCHECK_MAX_COUNT) { - unregisterNotificationById(HEALTH_NOTIFICATION_ID); - } - setCount(0); - } catch (e) { - if (count < HEALTHCHECK_MAX_COUNT) { - setCount((count) => ++count); - } else { - registerNotification(errorNotification); - } - } - }, HEALTHCHECK_INTERVAL); - - return () => clearInterval(interval); - }, [count, formatMessage, unregisterNotificationById, registerNotification, healthCheck]); -} - -export { useApiHealthPoll }; + const errorNotification: Notification = { + id: HEALTH_NOTIFICATION_ID, + text: formatMessage({ id: "notifications.error.health" }), + type: "error", + }; + + useHealthCheck( + () => registerNotification(errorNotification), + () => unregisterNotificationById(HEALTH_NOTIFICATION_ID) + ); +}; diff --git a/airbyte-webapp/src/hooks/theme/useAirbyteTheme.module.scss b/airbyte-webapp/src/hooks/theme/useAirbyteTheme.module.scss index 31c4dfdc613..ef919a47a10 100644 --- a/airbyte-webapp/src/hooks/theme/useAirbyteTheme.module.scss +++ b/airbyte-webapp/src/hooks/theme/useAirbyteTheme.module.scss @@ -17,12 +17,21 @@ colors.$orange-600 colors.$green-200 + colors.$green-300 + colors.$green-400 + colors.$green-500 colors.$green-600 + colors.$green-700 + colors.$green-800 colors.$grey-50 colors.$grey-100 colors.$grey-300 colors.$grey-400 + colors.$grey-500 + colors.$grey-600 + colors.$grey-700 + colors.$grey-800 colors.$grey-900 colors.$dark-blue diff --git 
a/airbyte-webapp/src/locales/en.errors.json b/airbyte-webapp/src/locales/en.errors.json index 05f9f126940..07c065b3298 100644 --- a/airbyte-webapp/src/locales/en.errors.json +++ b/airbyte-webapp/src/locales/en.errors.json @@ -1,5 +1,7 @@ { "cron-validation/invalid-timezone": "The timezone {cronTimezone} is currently not supported. Please chose another timezone.", + "cron-validation/invalid-expression": "The cron expression {cronExpression} is invalid.{validationErrorMessage, select, undefined {} other { Validation error: {validationErrorMessage}}}", + "cron-validation/under-one-hour-not-allowed": "Cron expressions that execute more frequently than once per hour are disabled. Contact us to unlock this feature.", "dbtcloud/access-denied": "dbt Cloud denied access. Please verify your service token and access URL.", "dbtcloud/paid-plan-required": "To use the dbt Cloud integration, you must be on a paid plan.", "dbtcloud/generic": "There was an error communicating with dbt Cloud.", diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index 644044231b2..03f96cf5ada 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -714,6 +714,46 @@ "connection.actions.cancel.clear.confirm.submit": "Yes, cancel clear", "connection.actions.cancel.clear.confirm.cancel": "No, continue clear", + "connections.mappings.title": "Mappings", + "connections.mappings.mappingType": "Mapping type", + "connections.mappings.emptyState": "No mappings yet", + "connections.mappings.selectStream": "Select a stream to apply mappings", + "connections.mappings.selectAStream": "Select a stream", + "connections.mappings.emptyState.upsellTitle": "Unlock the power of mappings", + "connections.mappings.emptyState.upsellBody": "Hash, encrypt, filter rows, rename fields, and more. 
Transform your data directly in Airbyte to ensure it arrives exactly how you want it to.", + "connections.mappings.emptyState.upsellFooter": "Talk to our sales team about upgrading to {product, select, teams {Teams} enterprise {Enterprise} other {Teams}} to unlock this feature.", + "connections.mappings.emptyState.upsellButton": "Unlock mappings", + "connections.mapping.fieldName": "Field name", + "connections.mappings.using": "using", + "connections.mappings.to": "to", + "connections.mappings.type.hash": "Hash", + "connections.mappings.hashing.method": "Method", + "connections.mappings.removeAll": "Remove all mappings from this connection", + "connections.mappings.type.hash.description": "Protect sensitive information by obscuring it irreversibly.", + "connections.mappings.type.fieldRenaming": "Rename field", + "connections.mappings.type.fieldRenaming.description": "Change the name of a field.", + "connections.mappings.type.rowFiltering": "Filter rows", + "connections.mappings.type.rowFiltering.description": "Include or exclude rows by their value.", + "connections.mappings.type.encryption": "Encrypt", + "connections.mappings.type.encryption.description": "Obscure sensitive information reversibly.", + "connections.mappings.if": "if the value in", + "connections.mappings.equals": "equals", + "connections.mappings.value": "value", + "connections.mappings.andKey": "and key:", + "connections.mappings.encryption.method": "encryption method", + "connections.mappings.encryption.key": "Key", + "connections.mappings.encryption.publicKey": "Public key", + "connections.mappings.newFieldName": "New field name", + "connections.mappings.fieldName": "Field name", + "connections.mappings.selectField": "Select a field", + "connections.mappings.encryption.mode": "Mode", + "connections.mappings.encryption.padding": "Padding", + "connections.mappings.addMapping": "+ Add mapping", + "connections.mappings.addStream": "+ Add stream", + "connections.mappings.addStream.disabled": "All enabled streams already have mappings", + "connections.mappings.rowFilter.in": "in", + "connections.mappings.rowFilter.out": "out", + "connection.timeline": "Timeline", "connection.timeline.empty": "No events to display", "connection.timeline.error": "Error: ", @@ -781,14 +821,48 @@ "connection.timeline.schema_update.fieldChanges": "{streams, plural, one {# stream} other {# streams}} changed", "connection.timeline.schema_update.viewDetails": "View details", + "connection.timeline.connector_update.source.version": "Connector source version changed", + "connection.timeline.connector_update.source.version.description.reason.updated.USER": "{user} changed {source} from {from} to {to}", + "connection.timeline.connector_update.source.version.description.reason.upgraded.USER": "{user} upgraded {source} from {from} to {to}", + "connection.timeline.connector_update.source.version.description.reason.downgraded.USER": "{user} downgraded {source} from {from} to {to}", + "connection.timeline.connector_update.source.version.description.reason.upgraded.SYSTEM": "Airbyte upgraded {source} from {from} to {to}", + "connection.timeline.connector_update.source.version.description.reason.downgraded.SYSTEM": "Airbyte downgraded {source} from {from} to {to}", + "connection.timeline.connector_update.destination.version": "Connector destination version changed", + "connection.timeline.connector_update.destination.version.description.reason.updated.USER": "{user} changed {destination} from {from} to {to}", + 
"connection.timeline.connector_update.destination.version.description.reason.upgraded.USER": "{user} upgraded {destination} from {from} to {to}", + "connection.timeline.connector_update.destination.version.description.reason.downgraded.USER": "{user} downgraded {destination} from {from} to {to}", + "connection.timeline.connector_update.destination.version.description.reason.upgraded.SYSTEM": "Airbyte upgraded {destination} from {from} to {to}", + "connection.timeline.connector_update.destination.version.description.reason.downgraded.SYSTEM": "Airbyte downgraded {destination} from {from} to {to}", + + "connection.timeline.mapping_create": "Mapping created", + "connection.timeline.mapping_create.hashing.description": "{user} created a hashing mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_create.field_renaming.description": "{user} created a field-renaming mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_create.row_filtering.description": "{user} created a row-filtering mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_create.encryption.description": "{user} created an encryption mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + + "connection.timeline.mapping_update": "Mapping edited", + "connection.timeline.mapping_update.hashing.description": "{user} updated a hashing mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_update.field_renaming.description": "{user} updated a field-renaming mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_update.row_filtering.description": "{user} updated a row-filtering mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_update.encryption.description": "{user} updated an encryption mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + + "connection.timeline.mapping_delete": "Mapping deleted", + "connection.timeline.mapping_delete.hashing.description": "{user} deleted a hashing mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_delete.field_renaming.description": "{user} deleted a field-renaming mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_delete.row_filtering.description": "{user} deleted a row-filtering mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + "connection.timeline.mapping_delete.encryption.description": "{user} deleted an encryption mapping for the {stream} stream{namespace, select, undefined {} other { in the {namespace} namespace}}.", + + "connectionAutoDisabledReason.TOO_MANY_FAILED_JOBS_WITH_NO_RECENT_SUCCESS": "Airbyte disabled the connection due to consecutive failures with no recent success.", "connectionAutoDisabledReason.ONLY_FAILED_JOBS_RECENTLY": "Airbyte disabled the connection due to only failed jobs recently.", 
"connectionAutoDisabledReason.TOO_MANY_CONSECUTIVE_FAILED_JOBS_IN_A_ROW": "Airbyte disabled the connection due to consecutive failures.", "connectionAutoDisabledReason.SCHEMA_CHANGES_ARE_BREAKING": "Airbyte disabled the connection due to a detected breaking schema change. Resolve the error in the Schema tab to re-enable.", "connectionAutoDisabledReason.DISABLE_CONNECTION_IF_ANY_SCHEMA_CHANGES": "Airbyte disabled the connection due to a detected schema change. Approve the schema changes in the Schema tab to re-enable.", - "connectionAutoDisabledReason.INVALID_CREDIT_BALANCE": "Airbyte disabled the connection due to a billing issue. Resolve your billing status to re-enable the connection.", + "connectionAutoDisabledReason.INVALID_CREDIT_BALANCE": "Airbyte disabled the connection due to a billing issue.", "connectionAutoDisabledReason.CONNECTOR_NOT_SUPPORTED": "Airbyte disabled the connection due to an unsupported connector version.", - "connectionAutoDisabledReason.WORKSPACE_IS_DELINQUENT": "Airbyte disabled the connection due to a billing issue. Resolve your billing status to re-enable the connection.", + "connectionAutoDisabledReason.WORKSPACE_IS_DELINQUENT": "Airbyte disabled the connection due to a billing issue.", "connectionAutoDisabledReason.SCHEMA_CHANGE_AUTO_PROPAGATE": "Airbyte disabled the connection automatically due to schema changes.", + "connectionAutoDisabledReason.INVOICE_MARKED_UNCOLLECTIBLE": "Airbyte disabled the connection due to a billing issue.", + "connectionAutoDisabledReason.INVALID_PAYMENT_METHOD": "Airbyte disabled the connection due to a billing issue.", "connection.actions.error": "There was an error starting this job. Please try again.", "connection.actions.refreshData": "Refresh your data", @@ -1050,6 +1124,9 @@ "settings.organization.billing.billingInformation": "Billing information", "settings.organization.billing.billingInformationError": "Error loading billing information", "settings.organization.billing.update": "Update", + "settings.organization.billing.plan": "Plan", + "settings.organization.billing.planError": "Error loading plan information", + "settings.organization.billing.subscriptionCancelDate": "Subscription ends on", "settings.organization.billing.accountBalance": "Account balance", "settings.organization.billing.accountBalanceError": "Error loading account balance", "settings.organization.billing.remainingCreditsBanner": "You have {amount} remaining credits.", @@ -1329,26 +1406,7 @@ "connector.sort.success": "Sync success rate", "connector.sort.usage": "Usage", - "credits.credits": "Credits", - "credits.whatAreCredits": "What are credits?", - "credits.buyCredits": "Buy credits", "credits.talkToSales": "Talk to Sales", - "credits.remainingCredits": "Remaining credits", - "credits.creditUsage": "Credit Usage", - "credits.autoRechargeEnabled": "Credits will automatically be added when they run low. Contact us to change this behavior.", - "credits.minCreditsError": "The minimum credit purchase is {minimum} credits.", - "credits.maxCreditsError": "The maximum credit purchase is {maximum} credits.", - "credits.checkoutModalTitle": "Select credit quantity", - "credits.checkout": "Checkout", - "credits.checkout.creditExpiration": "Credits are valid for 12 months. 
Visit our pricing page to learn more.", - "credits.aboveMaxCredits": "To buy large quantities of credits, talk to our Sales team.", - "credits.numberOfCredits": "Number of credits", - "credits.pricePerCredit": "Price per credit", - "credits.totalPrice": "Total", - "credits.pricePlusTaxes": "{price} + taxes", - "credits.unlockDiscount": "Unlock a {discount} discount when buying at least {minimum,number} credits.", - "credits.noFractionalCredits": "Please enter a whole number of credits.", - "credits.noBillingAccount": "Your account is excluded from billing requirements and credits are not required.", "docs.notFoundError": "We were not able to receive docs. Please click the link above to open docs on our website", "docs.metrics.supportLevel.label": "Support Level", @@ -1918,20 +1976,31 @@ "jobHistory.logs.title": "Logs: {connectionName}", "jobHistory.logs.noLogs": "No logs", + "jobHistory.logs.loadingJob": "Loading job...", + "jobHistory.logs.loadingAttempt": "Loading attempt...", + "jobHistory.logs.noLogsFound": "No logs found for this job.", "jobHistory.logs.noAttempts": "An unknown error prevented this job from generating any attempts. No logs are available.", "jobHistory.logs.nextMatchLabel": "Jump to next match", "jobHistory.logs.previousMatchLabel": "Jump to previous match", "jobHistory.logs.downloadLogs": "Download logs", "jobHistory.logs.attemptLabel": "Attempt {attemptNumber} of {totalAttempts}", + "jobHistory.logs.moreDetails": "More details", "jobHistory.logs.logDownloadPending": "Downloading logs for job {jobId}…", "jobHistory.logs.logDownloadFailed": "Failed to download logs for job {jobId}.", "jobHistory.logs.searchPlaceholder": "Search logs", - "jobHistory.logs.logOrigin.all": "All logs", - "jobHistory.logs.logOrigin.source": "source", - "jobHistory.logs.logOrigin.destination": "destination", - "jobHistory.logs.logOrigin.other": "other", - "jobHistory.logs.logOrigin.platform": "platform", - "jobHistory.logs.logOrigin.replication-orchestrator": "replication-orchestrator", + "jobHistory.logs.logLevels": "Log levels", + "jobHistory.logs.logSources": "Log sources", + "jobHistory.logs.logSource.all": "All logs", + "jobHistory.logs.logSource.source": "source", + "jobHistory.logs.logSource.destination": "destination", + "jobHistory.logs.logSource.other": "other", + "jobHistory.logs.logSource.platform": "platform", + "jobHistory.logs.logSource.replication-orchestrator": "replication-orchestrator", + "jobHistory.logs.logLevel.debug": "DEBUG", + "jobHistory.logs.logLevel.info": "INFO", + "jobHistory.logs.logLevel.warn": "WARN", + "jobHistory.logs.logLevel.error": "ERROR", + "jobHistory.logs.logLevel.trace": "TRACE", "jobHistory.logs.logOrigins": "Log origins ({originsSelected} / {totalOrigins})", "jobHistory.copyLinkToJob": "Copy link to job", "jobHistory.copyLinkToJob.success": "Link copied to clipboard", @@ -2059,7 +2128,6 @@ "credits.date": "Date", "credits.amount": "Credits", - "credits.billing": "Billing", "credits.connection": "Connection", "credits.source": "Source", "credits.destination": "Destination", @@ -2069,27 +2137,15 @@ "credits.internalUsage": "Free (internal)", "credits.billedCost": "Billed", "credits.totalInLegend": "{total} total", - "credits.stripePortalLink": "Invoice history", "credits.workspace.deleted": "This workspace has been deleted", "credits.connection.deleted": "This connection has been deleted", "credits.timePeriod": "Time period", "credits.l24HourCredits": "Credit usage shown in the last 24 hours may be incomplete", - "credits.loadingCreditsUsage": 
"Loading credits usage …", - "credits.totalCreditsUsage": "Total credits usage", - "credits.usagePerConnection": "Usage per connection", - "credits.whatAre": "What are credits?", "credits.usage": "Usage", - "credits.noData": "You have no credits usage data for this period. Sync a connection to get started!", - "credits.creditsProblem": "You’re out of credits! To set up connections and run syncs, add credits.", - "credits.emailVerificationRequired": "You need to verify your email address before you can buy credits.", "credits.emailVerification.resendConfirmation": "We sent you a new verification link.", "credits.emailVerification.resendConfirmationError": "There was an error sending the verification link. Please try again.", - "credits.emailVerification.resend": "Send verification link", - "credits.lowBalance": "Your credit balance is low. Buy more credits to prevent your connections from being disabled or enroll in auto-recharge.", - "credits.zeroBalance": "All your connections have been disabled because your credit balance is 0. Buy credits or enroll in auto-recharge to enable your data to sync.", - "billing.pbaBillingActive": "This workspace is part of the {organizationName} organization and is billed on the organization level. For further billing questions, please contact us.", "billing.banners.manualPaymentStatus": "Reach out to Sales if you have any questions about your plan.", "billing.banners.manualPaymentStatusFree": "Your account will not be billed for usage. Please reach out to your contact at Airbyte for any questions.", "billing.banners.manualPaymentStatusInternal": "This is an Airbyte internal organization. Usage will not be billed.", @@ -2105,16 +2161,10 @@ "billing.banners.postTrial": "Enter payment details to keep your syncs running.", "billing.banners.postTrialWithLink": "Enter payment details to keep your syncs running.", - "trial.alertMessage": "You are using a trial of Airbyte. Your trial ends in {remainingDays, plural, one {# day} other {# days}}. Purchase now", - - "trial.preTrialAlertMessage": "Your 14-day trial of Airbyte will start once your first sync has succeeded.", - "inviteUsersHint.message": "Need help from a teammate to set up the {connector}?", "inviteUsersHint.cta": "Invite users", - "sidebar.credits": "Credits", "sidebar.billing": "Billing", - "sidebar.billingMigrationMaintenance": "Airbyte billing is currently undergoing maintenance. Please check our status page for more details.", "workspace.adminWorkspaceWarning": "Admin", "workspace.adminWorkspaceWarningTooltip": "You are not a member of this workspace. Be careful when making changes!", @@ -2154,5 +2204,11 @@ "failureMessage.type.error": "Failure in {origin}", "failureMessage.type.warning": "Warning from {origin}", - "failureMessage.label": "{type} {message}" + "failureMessage.label": "{type} {message}", + + "jinjaInput.suggest.examples": "{count, plural, one {Example} other {Examples}}", + "jinjaInput.suggest.userInput.description": "Inserts a reference to the user-provided value for the `{label}` user input.", + "jinjaInput.suggest.userInput.currentTestingValue": "Current Testing Value", + "jinjaInput.suggest.userInput.createNew.label": "Create new user input…", + "jinjaInput.suggest.userInput.createNew.doc": "Create a new user input and insert a reference to it here." 
} diff --git a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.test.tsx b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.test.tsx deleted file mode 100644 index e684d22edb9..00000000000 --- a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.test.tsx +++ /dev/null @@ -1,99 +0,0 @@ -import { render } from "@testing-library/react"; -import { Suspense } from "react"; - -import { TestWrapper } from "test-utils"; - -import { - CloudWorkspaceRead, - CloudWorkspaceReadCreditStatus as CreditStatus, - CloudWorkspaceReadWorkspaceTrialStatus as WorkspaceTrialStatus, -} from "core/api/types/CloudApi"; -import { I18nProvider } from "core/services/i18n"; - -import { WorkspaceStatusBanner } from "./WorkspaceStatusBanner"; - -const defaultCloudWorkspace = { workspaceId: "123" }; - -const renderWorkspaceBanner = (cloudWorkspace: CloudWorkspaceRead) => { - return render( - - - - - - - - ); -}; - -describe("WorkspaceCreditsBanner", () => { - it("should render credits problem banner for credits problem pre-trial", () => { - const cloudWorkspace = { - ...defaultCloudWorkspace, - workspaceTrialStatus: WorkspaceTrialStatus.pre_trial, - creditStatus: CreditStatus.negative_beyond_grace_period, - }; - - const { getByText } = renderWorkspaceBanner(cloudWorkspace); - expect(getByText(/You’re out of credits!/)).toBeTruthy(); - }); - it("should render credits problem banner for credits problem during trial", () => { - const cloudWorkspace = { - ...defaultCloudWorkspace, - workspaceTrialStatus: WorkspaceTrialStatus.in_trial, - creditStatus: CreditStatus.negative_beyond_grace_period, - }; - - const { getByText } = renderWorkspaceBanner(cloudWorkspace); - expect(getByText(/You’re out of credits!/)).toBeTruthy(); - }); - - it("should render credits problem banner for credits problem after trial", () => { - const cloudWorkspace = { - ...defaultCloudWorkspace, - workspaceTrialStatus: WorkspaceTrialStatus.out_of_trial, - creditStatus: CreditStatus.negative_beyond_grace_period, - }; - - const { getByText } = renderWorkspaceBanner(cloudWorkspace); - expect(getByText(/You’re out of credits!/)).toBeTruthy(); - }); - - it("should render pre-trial banner if user's trial has not started", () => { - const cloudWorkspace = { - ...defaultCloudWorkspace, - creditStatus: CreditStatus.positive, - workspaceTrialStatus: WorkspaceTrialStatus.pre_trial, - trialExpiryTimestamp: undefined, - }; - - const { getByText } = renderWorkspaceBanner(cloudWorkspace); - - expect(getByText(/Your 14-day trial of Airbyte will start/)).toBeTruthy(); - }); - - it("should render trial banner if user is in trial", () => { - // create a date that is 1 day in the future - const oneDayFromNow = Date.now() + 60_000 * 60 * 24; - const cloudWorkspace = { - ...defaultCloudWorkspace, - creditStatus: CreditStatus.positive, - workspaceTrialStatus: WorkspaceTrialStatus.in_trial, - trialExpiryTimestamp: oneDayFromNow, - }; - - const { getByText } = renderWorkspaceBanner(cloudWorkspace); - expect(getByText(/You are using a trial of Airbyte/)).toBeTruthy(); - expect(getByText(/1 day/)).toBeTruthy(); - }); - it("should render an empty div if user is out of trial", () => { - const cloudWorkspace = { - ...defaultCloudWorkspace, - creditStatus: CreditStatus.positive, - workspaceTrialStatus: WorkspaceTrialStatus.out_of_trial, - }; - - const { queryByTestId } = 
renderWorkspaceBanner(cloudWorkspace); - expect(queryByTestId("workspace-status-banner")).toBeNull(); - }); -}); diff --git a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.tsx b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.tsx deleted file mode 100644 index b762870a52c..00000000000 --- a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.tsx +++ /dev/null @@ -1,80 +0,0 @@ -import { useMemo } from "react"; -import { FormattedMessage } from "react-intl"; - -import { AlertBanner } from "components/ui/Banner/AlertBanner"; -import { Link } from "components/ui/Link"; - -import { - CloudWorkspaceRead, - CloudWorkspaceReadCreditStatus as CreditStatus, - CloudWorkspaceReadWorkspaceTrialStatus as WorkspaceTrialStatus, -} from "core/api/types/CloudApi"; -import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; - -interface WorkspaceStatusBannerProps { - cloudWorkspace: CloudWorkspaceRead; -} -export const WorkspaceStatusBanner: React.FC = ({ cloudWorkspace }) => { - const negativeCreditStatus = useMemo(() => { - // these remain the same regardless of the new trial policy - return ( - cloudWorkspace.creditStatus && - (cloudWorkspace.creditStatus === CreditStatus.negative_beyond_grace_period || - cloudWorkspace.creditStatus === CreditStatus.negative_max_threshold || - cloudWorkspace.creditStatus === CreditStatus.negative_within_grace_period) - ); - }, [cloudWorkspace.creditStatus]); - - const workspaceCreditsBannerContent = useMemo(() => { - if (negativeCreditStatus) { - return ( - {content}, - }} - /> - ); - } - - if (cloudWorkspace.workspaceTrialStatus === WorkspaceTrialStatus.pre_trial) { - return ; - } - - if (cloudWorkspace.workspaceTrialStatus === WorkspaceTrialStatus.in_trial) { - const { trialExpiryTimestamp } = cloudWorkspace; - - // calculate difference between timestamp (in epoch milliseconds) and now (in epoch milliseconds) - const trialRemainingMilliseconds = trialExpiryTimestamp ? trialExpiryTimestamp - Date.now() : 0; - if (trialRemainingMilliseconds < 0) { - return null; - } - // calculate days (rounding up if decimal) - const trialRemainingDays = Math.ceil(trialRemainingMilliseconds / (1000 * 60 * 60 * 24)); - - return ( - {content}, - }} - /> - ); - } - - return null; - }, [cloudWorkspace, negativeCreditStatus]); - - return ( - <> - {!!workspaceCreditsBannerContent && ( - - )} - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.test.tsx b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.test.tsx index b829151d230..e185f30fb2f 100644 --- a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.test.tsx +++ b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.test.tsx @@ -18,11 +18,6 @@ jest.mock("area/workspace/utils", () => ({ useCurrentWorkspaceLink: jest.fn().mockReturnValue((link: string) => link), })); -// We just mock out the legacy workspace banner, since that file has its own tests -jest.mock("./LegacyStatusBanner/WorkspaceStatusBanner", () => ({ - WorkspaceStatusBanner: () =>
, -})); - jest.mock("core/api/cloud", () => ({ useGetCloudWorkspaceAsync: jest.fn().mockReturnValue({ workspaceId: "workspace-1", @@ -39,7 +34,6 @@ const mockOrgInfo = (billing: WorkspaceOrganizationInfoReadBilling | undefined) organizationId: "org-1", organizationName: "org name", sso: false, - pba: false, billing, }); }; @@ -62,12 +56,6 @@ const mockGeneratedIntent = (options: { canViewTrialStatus: boolean; canManageOr }; describe("StatusBanner", () => { - it("should render legacy banner if no billing information is available", async () => { - mockOrgInfo(undefined); - const wrapper = await render(); - expect(wrapper.getByTestId("mockLegacyWorkspaceBanner")).toBeInTheDocument(); - }); - it("should render nothing with paymentStatus=OKAY and not in trial", async () => { mockOrgInfo({ paymentStatus: "okay" }); mockTrialStatus({ trialStatus: "post_trial" }); diff --git a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.tsx b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.tsx index 4270a8bfd42..0926e413010 100644 --- a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.tsx +++ b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.tsx @@ -2,23 +2,8 @@ import React from "react"; import { AlertBanner } from "components/ui/Banner/AlertBanner"; -import { useCurrentWorkspaceId } from "area/workspace/utils"; -import { useCurrentOrganizationInfo } from "core/api"; -import { useGetCloudWorkspaceAsync } from "core/api/cloud"; -import { useExperiment } from "hooks/services/Experiment"; - -import { WorkspaceStatusBanner as LegacyWorkspaceStatusBanner } from "./LegacyStatusBanner/WorkspaceStatusBanner"; import { useBillingStatusBanner } from "../../utils/useBillingStatusBanner"; -const LegacyStatusBanner: React.FC = () => { - const workspaceId = useCurrentWorkspaceId(); - const isBillingMigrationMaintenance = useExperiment("billing.migrationMaintenance"); - const cloudWorkspace = useGetCloudWorkspaceAsync(workspaceId); - return cloudWorkspace && !isBillingMigrationMaintenance ? ( - - ) : null; -}; - const WorkspaceStatusBanner: React.FC = () => { const statusBanner = useBillingStatusBanner("top_level"); return statusBanner ? ( @@ -27,6 +12,9 @@ const WorkspaceStatusBanner: React.FC = () => { }; export const StatusBanner: React.FC = () => { - const { billing } = useCurrentOrganizationInfo(); - return {billing ? 
: }; + return ( + + + + ); }; diff --git a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/index.ts b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/index.ts new file mode 100644 index 00000000000..d8526a76acf --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/index.ts @@ -0,0 +1 @@ +export { StatusBanner } from "./StatusBanner"; diff --git a/airbyte-webapp/src/packages/cloud/area/billing/utils/useBillingStatusBanner.tsx b/airbyte-webapp/src/packages/cloud/area/billing/utils/useBillingStatusBanner.tsx index a93b1fee9ce..b4bc20b94e7 100644 --- a/airbyte-webapp/src/packages/cloud/area/billing/utils/useBillingStatusBanner.tsx +++ b/airbyte-webapp/src/packages/cloud/area/billing/utils/useBillingStatusBanner.tsx @@ -7,7 +7,7 @@ import { useCurrentWorkspaceLink } from "area/workspace/utils"; import { useCurrentOrganizationInfo, useOrganizationTrialStatus } from "core/api"; import { links } from "core/utils/links"; import { Intent, useGeneratedIntent } from "core/utils/rbac"; -import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; +import { CloudSettingsRoutePaths } from "packages/cloud/views/settings/routePaths"; import { RoutePaths } from "pages/routePaths"; interface BillingStatusBanner { @@ -89,7 +89,7 @@ export const useBillingStatusBanner = (context: "top_level" | "billing_page"): B }, { lnk: (node: React.ReactNode) => ( - {node} + {node} ), } ), @@ -111,7 +111,7 @@ export const useBillingStatusBanner = (context: "top_level" | "billing_page"): B ? Math.max(dayjs(billing.gracePeriodEndsAt * 1000).diff(dayjs(), "days"), 0) : 0, lnk: (node: React.ReactNode) => ( - {node} + {node} ), } ), @@ -148,7 +148,7 @@ export const useBillingStatusBanner = (context: "top_level" | "billing_page"): B { days: Math.max(dayjs(trialStatus.trialEndsAt).diff(dayjs(), "days"), 0), lnk: (node: React.ReactNode) => ( - {node} + {node} ), } ), @@ -168,7 +168,7 @@ export const useBillingStatusBanner = (context: "top_level" | "billing_page"): B }, { lnk: (node: React.ReactNode) => ( - {node} + {node} ), } ), diff --git a/airbyte-webapp/src/packages/cloud/views/billing/useRedirectToCustomerPortal.ts b/airbyte-webapp/src/packages/cloud/area/billing/utils/useRedirectToCustomerPortal.ts similarity index 94% rename from airbyte-webapp/src/packages/cloud/views/billing/useRedirectToCustomerPortal.ts rename to airbyte-webapp/src/packages/cloud/area/billing/utils/useRedirectToCustomerPortal.ts index 4ed003f2306..0dd7e9207dc 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/useRedirectToCustomerPortal.ts +++ b/airbyte-webapp/src/packages/cloud/area/billing/utils/useRedirectToCustomerPortal.ts @@ -6,14 +6,14 @@ import { useCurrentWorkspace, useGetCustomerPortalUrl } from "core/api"; import { CustomerPortalRequestBodyFlow } from "core/api/types/AirbyteClient"; import { trackError } from "core/utils/datadog"; import { useNotificationService } from "hooks/services/Notification"; -import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; +import { CloudSettingsRoutePaths } from "packages/cloud/views/settings/routePaths"; import { RoutePaths } from "pages/routePaths"; export const useRedirectToCustomerPortal = (flow: CustomerPortalRequestBodyFlow) => { const [redirecting, setRedirecting] = useState(false); const { organizationId } = useCurrentWorkspace(); const createLink = useCurrentWorkspaceLink(); - const pathToBilling = createLink(`/${RoutePaths.Settings}/${CloudRoutes.Billing}`); + const pathToBilling = 
createLink(`/${RoutePaths.Settings}/${CloudSettingsRoutePaths.Billing}`); const { mutateAsync: getCustomerPortalUrl, isLoading: isCustomerPortalUrlLoading } = useGetCustomerPortalUrl(); const { registerNotification, unregisterNotificationById } = useNotificationService(); const { formatMessage } = useIntl(); diff --git a/airbyte-webapp/src/packages/cloud/area/billing/utils/useShowBillingPage.ts b/airbyte-webapp/src/packages/cloud/area/billing/utils/useShowBillingPage.ts deleted file mode 100644 index 5c9904cece8..00000000000 --- a/airbyte-webapp/src/packages/cloud/area/billing/utils/useShowBillingPage.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { useCurrentOrganizationInfo } from "core/api"; - -export const useShowBillingPageV2 = () => { - const { billing } = useCurrentOrganizationInfo(); - return !!billing; -}; diff --git a/airbyte-webapp/src/packages/cloud/cloudRoutePaths.tsx b/airbyte-webapp/src/packages/cloud/cloudRoutePaths.tsx index 9bc4c12489b..0a386499844 100644 --- a/airbyte-webapp/src/packages/cloud/cloudRoutePaths.tsx +++ b/airbyte-webapp/src/packages/cloud/cloudRoutePaths.tsx @@ -2,7 +2,6 @@ export enum CloudRoutes { Root = "/", AcceptInvitation = "/accept-invite", Metrics = "metrics", - Billing = "billing", UpcomingFeatures = "upcoming-features", // Auth routes diff --git a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx index df51fc959d6..54c55666ca3 100644 --- a/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx +++ b/airbyte-webapp/src/packages/cloud/cloudRoutes.tsx @@ -15,7 +15,7 @@ import { FeatureItem, useFeature } from "core/services/features"; import { isCorporateEmail } from "core/utils/freeEmailProviders"; import { Intent, useGeneratedIntent, useIntent } from "core/utils/rbac"; import { storeUtmFromQuery } from "core/utils/utmStorage"; -import { useExperiment, useExperimentContext } from "hooks/services/Experiment"; +import { useExperimentContext } from "hooks/services/Experiment"; import { useBuildUpdateCheck } from "hooks/services/useBuildUpdateCheck"; import { useQuery } from "hooks/useQuery"; import ConnectorBuilderRoutes from "pages/connectorBuilder/ConnectorBuilderRoutes"; @@ -29,28 +29,26 @@ import { GeneralOrganizationSettingsPage } from "pages/SettingsPage/pages/Organi import { OrganizationMembersPage } from "pages/SettingsPage/pages/Organization/OrganizationMembersPage"; import { AcceptInvitation } from "./AcceptInvitation"; -import { useShowBillingPageV2 } from "./area/billing/utils/useShowBillingPage"; import { CloudRoutes } from "./cloudRoutePaths"; import { LDExperimentServiceProvider } from "./services/thirdParty/launchdarkly"; import { SSOBookmarkPage } from "./views/auth/SSOBookmarkPage"; import { SSOIdentifierPage } from "./views/auth/SSOIdentifierPage"; -import { OrganizationBillingPage } from "./views/billing/OrganizationBillingPage"; import { DbtCloudSettingsView } from "./views/settings/integrations/DbtCloudSettingsView"; import { CloudSettingsRoutePaths } from "./views/settings/routePaths"; import { AccountSettingsView } from "./views/users/AccountSettingsView"; import { ApplicationSettingsView } from "./views/users/ApplicationSettingsView/ApplicationSettingsView"; import { DataResidencyView } from "./views/workspaces/DataResidencyView"; import { WorkspaceSettingsView } from "./views/workspaces/WorkspaceSettingsView"; -import { WorkspaceUsagePage } from "./views/workspaces/WorkspaceUsagePage"; const LoginPage = React.lazy(() => import("./views/auth/LoginPage")); const SignupPage = React.lazy(() 
=> import("./views/auth/SignupPage")); const CloudMainView = React.lazy(() => import("packages/cloud/views/layout/CloudMainView")); const CloudWorkspacesPage = React.lazy(() => import("packages/cloud/views/workspaces")); const AuthLayout = React.lazy(() => import("packages/cloud/views/auth")); -const BillingPage = React.lazy(() => import("packages/cloud/views/billing")); +const OrganizationBillingPage = React.lazy(() => import("packages/cloud/views/billing/OrganizationBillingPage")); const OrganizationUsagePage = React.lazy(() => import("packages/cloud/views/billing/OrganizationUsagePage")); const UpcomingFeaturesPage = React.lazy(() => import("packages/cloud/views/UpcomingFeaturesPage")); +const WorkspaceUsagePage = React.lazy(() => import("packages/cloud/views/workspaces/WorkspaceUsagePage")); const ConnectionsRoutes = React.lazy(() => import("pages/connections/ConnectionsRoutes")); @@ -76,8 +74,6 @@ const MainRoutes: React.FC = () => { const canViewOrgSettings = useIntent("ViewOrganizationSettings", { organizationId: workspace.organizationId }); const canManageOrganizationBilling = useGeneratedIntent(Intent.ManageOrganizationBilling); const canViewOrganizationUsage = useGeneratedIntent(Intent.ViewOrganizationUsage); - const showBillingPageV2 = useShowBillingPageV2(); - const isBillingMigrationMaintenance = useExperiment("billing.migrationMaintenance"); useExperimentContext("organization", workspace.organizationId); @@ -129,25 +125,22 @@ const MainRoutes: React.FC = () => { {supportsCloudDbtIntegration && ( } /> )} - {showBillingPageV2 && } />} + } /> {canViewOrgSettings && ( <> } /> } /> )} - {canManageOrganizationBilling && showBillingPageV2 && ( + {canManageOrganizationBilling && ( } /> )} - {canViewOrganizationUsage && showBillingPageV2 && ( + {canViewOrganizationUsage && ( } /> )} } /> } /> - {!showBillingPageV2 && !isBillingMigrationMaintenance && ( - } /> - )} } /> } /> } /> diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/BillingPage.module.scss b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/BillingPage.module.scss deleted file mode 100644 index c5eeee306d5..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/BillingPage.module.scss +++ /dev/null @@ -1,23 +0,0 @@ -@use "scss/variables"; -@forward "src/components/ui/Button/Button.module.scss"; - -.content { - min-width: variables.$min-width-wide-table-container; -} - -.creditUsageLoading { - display: flex; - flex-direction: column; - gap: variables.$spacing-md; - justify-content: center; - align-items: center; - margin: variables.$spacing-2xl auto; -} - -.stripePortalLink { - text-decoration: none; -} - -.filesIcon { - max-height: 100%; -} diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/BillingPage.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/BillingPage.tsx deleted file mode 100644 index d4309269b43..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/BillingPage.tsx +++ /dev/null @@ -1,81 +0,0 @@ -import classnames from "classnames"; -import React from "react"; -import { FormattedMessage } from "react-intl"; - -import { HeadTitle } from "components/HeadTitle"; -import { MainPageWithScroll } from "components/MainPageWithScroll"; -import { FlexContainer } from "components/ui/Flex"; -import { Heading } from "components/ui/Heading"; -import { PageHeader } from "components/ui/PageHeader"; -import { Spinner } from "components/ui/Spinner"; -import { Text } from "components/ui/Text"; - -import { 
useCurrentOrganizationInfo } from "core/api"; -import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; -import { links } from "core/utils/links"; - -import styles from "./BillingPage.module.scss"; -import { CreditsUsage } from "./components/CreditsUsage"; -import { CreditsUsageContextProvider } from "./components/CreditsUsageContext"; -import { PbaBillingBanner } from "./components/PbaBillingBanner"; -import { RemainingCredits } from "./components/RemainingCredits"; -import FilesIcon from "./filesIcon.svg?react"; - -const StripePortalLink: React.FC = () => { - return ( - - - - - - - ); -}; - -export const BillingPage: React.FC = () => { - useTrackPage(PageTrackingCodes.CREDITS); - const organization = useCurrentOrganizationInfo(); - - return ( - } - pageTitle={ - - - - } - endComponent={} - /> - } - > - - {organization?.pba ? ( - - ) : ( - - )} - - - - - -
- } - > - - - - -
- - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.test.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.test.tsx deleted file mode 100644 index f42962ccfd9..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.test.tsx +++ /dev/null @@ -1,132 +0,0 @@ -import { getByTestId, queryByTestId } from "@testing-library/react"; - -import { mocked, render } from "test-utils"; - -import { useGetCloudWorkspace } from "core/api/cloud"; -import { CloudWorkspaceReadCreditStatus, CloudWorkspaceReadWorkspaceTrialStatus } from "core/api/types/CloudApi"; -import { useExperiment } from "hooks/services/Experiment"; - -import { BillingBanners } from "./BillingBanners"; - -jest.mock("core/api/cloud", () => ({ - useGetCloudWorkspace: jest.fn(), -})); - -jest.mock("core/api", () => ({ - useCurrentWorkspace: jest.fn().mockReturnValue({ - workspaceId: "123", - }), -})); - -jest.mock("hooks/services/Experiment", () => ({ - useExperiment: jest.fn().mockReturnValue(false), -})); - -jest.mock("core/services/auth", () => ({ - useAuthService: jest.fn().mockReturnValue({ - emailVerified: true, - sendEmailVerification: jest.fn(), - }), -})); - -function mockAutoRechargeExperiment(enabled: boolean) { - mocked(useExperiment).mockImplementation((experiment) => { - if (experiment === "billing.autoRecharge") { - return enabled; - } - - throw new Error("Unexpected experiment"); - }); -} - -function mockWorkspace( - credits: number, - creditStatus: CloudWorkspaceReadCreditStatus, - trialStatus: CloudWorkspaceReadWorkspaceTrialStatus -) { - mocked(useGetCloudWorkspace).mockReturnValue({ - workspaceId: "123", - remainingCredits: credits, - creditStatus, - workspaceTrialStatus: trialStatus, - }); -} - -describe("BillingBanners", () => { - beforeEach(() => { - mockAutoRechargeExperiment(false); - }); - - describe("auto recharge banner", () => { - it("should show auto recharge enabled banner", async () => { - mockAutoRechargeExperiment(true); - mockWorkspace( - 500, - CloudWorkspaceReadCreditStatus.positive, - CloudWorkspaceReadWorkspaceTrialStatus.credit_purchased - ); - const banners = await render(); - expect(getByTestId(banners.baseElement, "autoRechargeEnabledBanner")).toBeVisible(); - }); - - it("should only show auto recharge banner even on low credit", async () => { - mockAutoRechargeExperiment(true); - mockWorkspace( - 5, - CloudWorkspaceReadCreditStatus.positive, - CloudWorkspaceReadWorkspaceTrialStatus.credit_purchased - ); - const banners = await render(); - expect(queryByTestId(banners.baseElement, "autoRechargeEnabledBanner")).toBeInTheDocument(); - expect(queryByTestId(banners.baseElement, "lowCreditsBanner")).not.toBeInTheDocument(); - expect(queryByTestId(banners.baseElement, "noCreditsBanner")).not.toBeInTheDocument(); - }); - - it("should only show auto recharge banner even on no credits", async () => { - mockAutoRechargeExperiment(true); - mockWorkspace( - -2, - CloudWorkspaceReadCreditStatus.negative_beyond_grace_period, - CloudWorkspaceReadWorkspaceTrialStatus.credit_purchased - ); - const banners = await render(); - expect(queryByTestId(banners.baseElement, "autoRechargeEnabledBanner")).toBeInTheDocument(); - expect(queryByTestId(banners.baseElement, "lowCreditsBanner")).not.toBeInTheDocument(); - expect(queryByTestId(banners.baseElement, "noCreditsBanner")).not.toBeInTheDocument(); - }); - }); - - describe("no billing account", () => { - it("should show only no 
billing account banner", async () => { - mockWorkspace(-5, CloudWorkspaceReadCreditStatus.positive, CloudWorkspaceReadWorkspaceTrialStatus.out_of_trial); - const banners = await render(); - expect(queryByTestId(banners.baseElement, "noBillingAccount")).toBeInTheDocument(); - expect(queryByTestId(banners.baseElement, "lowCreditsBanner")).not.toBeInTheDocument(); - expect(queryByTestId(banners.baseElement, "noCreditsBanner")).not.toBeInTheDocument(); - }); - }); - - describe("low credit warnings", () => { - it("should show low credit banner when credits are low", async () => { - mockWorkspace( - 5, - CloudWorkspaceReadCreditStatus.positive, - CloudWorkspaceReadWorkspaceTrialStatus.credit_purchased - ); - const banners = await render(); - expect(queryByTestId(banners.baseElement, "lowCreditsBanner")).toBeInTheDocument(); - expect(queryByTestId(banners.baseElement, "noCreditsBanner")).not.toBeInTheDocument(); - }); - - it("should show no credit banner when credits are negative", async () => { - mockWorkspace( - 0, - CloudWorkspaceReadCreditStatus.negative_within_grace_period, - CloudWorkspaceReadWorkspaceTrialStatus.credit_purchased - ); - const banners = await render(); - expect(queryByTestId(banners.baseElement, "noCreditsBanner")).toBeInTheDocument(); - expect(queryByTestId(banners.baseElement, "lowCreditsBanner")).not.toBeInTheDocument(); - }); - }); -}); diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.tsx deleted file mode 100644 index bf1c26cd508..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/BillingBanners.tsx +++ /dev/null @@ -1,107 +0,0 @@ -import { FormattedMessage, useIntl } from "react-intl"; - -import { FlexContainer } from "components/ui/Flex"; -import { ExternalLink, Link } from "components/ui/Link"; -import { Message } from "components/ui/Message"; - -import { useCurrentWorkspace } from "core/api"; -import { useGetCloudWorkspace, useResendEmailVerification } from "core/api/cloud"; -import { CloudWorkspaceReadCreditStatus, CloudWorkspaceReadWorkspaceTrialStatus } from "core/api/types/CloudApi"; -import { useAuthService } from "core/services/auth"; -import { links } from "core/utils/links"; -import { useExperiment } from "hooks/services/Experiment"; - -const LOW_BALANCE_CREDIT_THRESHOLD = 20; - -export const EmailVerificationHint: React.FC = () => { - const { mutateAsync: resendEmailVerification, isLoading } = useResendEmailVerification(); - - return ( - } - actionBtnText={} - actionBtnProps={{ isLoading }} - onAction={resendEmailVerification} - /> - ); -}; - -const AutoRechargeEnabledBanner: React.FC = () => ( - ( - - {node} - - ), - }} - /> - } - /> -); - -const LowCreditBalanceHint: React.FC = () => { - const { formatMessage } = useIntl(); - const currentWorkspace = useCurrentWorkspace(); - const cloudWorkspace = useGetCloudWorkspace(currentWorkspace.workspaceId); - const credits = cloudWorkspace.remainingCredits ?? 
0; - - if (credits < 0 && cloudWorkspace.creditStatus === CloudWorkspaceReadCreditStatus.positive) { - // Having a positive credit status while credits amount are negative (and no longer in pre_trial) - // means this workspace is an internal workspace that isn't billed - return ; - } - - if (cloudWorkspace.workspaceTrialStatus === CloudWorkspaceReadWorkspaceTrialStatus.pre_trial) { - // If we're pre trial we don't have any credits yet - return null; - } - - const messageParams = { - lnk: (node: React.ReactNode) => ( - - {node} - - ), - }; - - if (credits <= 0) { - return ( - - ); - } - - if (credits < LOW_BALANCE_CREDIT_THRESHOLD) { - return ( - - ); - } - - return null; -}; - -export const BillingBanners: React.FC = () => { - const { emailVerified } = useAuthService(); - - const isAutoRechargeEnabled = useExperiment("billing.autoRecharge"); - - return ( - - {!emailVerified && } - {isAutoRechargeEnabled ? : } - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CheckoutCreditsModal.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CheckoutCreditsModal.tsx deleted file mode 100644 index 6ba4b7faad4..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CheckoutCreditsModal.tsx +++ /dev/null @@ -1,215 +0,0 @@ -import { useFormState, useWatch } from "react-hook-form"; -import { FormattedMessage, FormattedNumber, useIntl } from "react-intl"; -import * as yup from "yup"; -import { SchemaOf } from "yup"; - -import { Form, FormControl } from "components/forms"; -import { ModalFormSubmissionButtons } from "components/forms/ModalFormSubmissionButtons"; -import { Box } from "components/ui/Box"; -import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; -import { ExternalLink, Link } from "components/ui/Link"; -import { Message } from "components/ui/Message"; -import { ModalBody, ModalFooter } from "components/ui/Modal"; -import { Text } from "components/ui/Text"; - -import { useCurrentWorkspaceId } from "area/workspace/utils"; -import { useStripeCheckout } from "core/api/cloud"; -import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; -import { links } from "core/utils/links"; -import { ModalContentProps } from "hooks/services/Modal"; - -import { STRIPE_SUCCESS_QUERY } from "./RemainingCredits"; - -const DEFAULT_CREDITS = 200; -const MIN_CREDITS = 20; -const MAX_CREDITS = 6000; - -interface CreditsFormValues { - quantity: number; -} - -const getPrice = (quantity: number) => { - if (quantity >= 4800) { - return 2.08; - } - if (quantity >= 2200) { - return 2.27; - } - - return 2.5; -}; - -const PricePreview: React.FC = () => { - const { formatNumber } = useIntl(); - const quantity = useWatch({ name: "quantity" }); - return ( - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ); -}; - -interface DiscountMessageProps { - minimum: number; - maximum?: number; - discount: number; -} - -const DiscountMessage: React.FC = ({ maximum, minimum, discount }) => { - const { formatNumber } = useIntl(); - const { isValid } = useFormState(); - const quantity = useWatch({ name: "quantity" }); - const isActive = quantity >= minimum && isValid; - return ( - - - - - - - ); -}; - -const TalkToSalesBanner: React.FC = () => { - const quantity = useWatch({ name: "quantity" }); - - if (quantity <= MAX_CREDITS) { - return null; - } - - return ( - - ( - - {node} - - ), - }} - /> - } - /> - - ); -}; - -export const CheckoutCreditsModal: 
React.FC> = ({ onCancel }) => { - const { formatMessage } = useIntl(); - const { mutateAsync: createCheckout } = useStripeCheckout(); - const workspaceId = useCurrentWorkspaceId(); - const analytics = useAnalyticsService(); - - const startStripeCheckout = async (values: CreditsFormValues) => { - // Use the current URL as a success URL but attach the STRIPE_SUCCESS_QUERY to it - const successUrl = new URL(window.location.href); - successUrl.searchParams.set(STRIPE_SUCCESS_QUERY, "true"); - const { stripeUrl } = await createCheckout({ - workspaceId, - successUrl: successUrl.href, - cancelUrl: window.location.href, - stripeMode: "payment", - quantity: values.quantity, - }); - analytics.track(Namespace.CREDITS, Action.CHECKOUT_START, { - actionDescription: "Checkout Start", - }); - // Forward to stripe as soon as we created a checkout session successfully - window.location.assign(stripeUrl); - }; - - const creditsFormSchema: SchemaOf = yup.object({ - quantity: yup - .number() - .integer(formatMessage({ id: "credits.noFractionalCredits" })) - .transform((val) => (typeof val !== "number" || isNaN(val) ? 0 : val)) - .min(MIN_CREDITS, formatMessage({ id: "credits.minCreditsError" }, { minimum: MIN_CREDITS })) - .max(MAX_CREDITS, formatMessage({ id: "credits.maxCreditsError" }, { maximum: MAX_CREDITS })) - .required(), - }); - - return ( - - defaultValues={{ quantity: DEFAULT_CREDITS }} - schema={creditsFormSchema} - onSubmit={startStripeCheckout} - > - - - - - - - - - - {children} }} - /> - - - - - - - - - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsage.module.scss b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsage.module.scss deleted file mode 100644 index 92957120828..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsage.module.scss +++ /dev/null @@ -1,8 +0,0 @@ -@use "scss/variables"; - -.card { - min-height: 300px; - height: auto; - display: flex; - flex-direction: column; -} diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsage.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsage.tsx deleted file mode 100644 index 3fde7daf846..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsage.tsx +++ /dev/null @@ -1,54 +0,0 @@ -import React from "react"; -import { FormattedMessage } from "react-intl"; -import { useEffectOnce } from "react-use"; - -import { Box } from "components/ui/Box"; -import { Card } from "components/ui/Card"; -import { Heading } from "components/ui/Heading"; - -import { trackTiming } from "core/utils/datadog"; -import { UsagePerDayGraph } from "packages/cloud/area/billing/components/UsagePerDayGraph"; - -import styles from "./CreditsUsage.module.scss"; -import { useCreditsContext } from "./CreditsUsageContext"; -import { CreditsUsageFilters } from "./CreditsUsageFilters"; -import { EmptyState } from "./EmptyState"; -import { UsagePerConnectionTable } from "./UsagePerConnectionTable"; - -export const CreditsUsage: React.FC = () => { - const { freeAndPaidUsageByTimeChunk, hasFreeUsage, freeAndPaidUsageByConnection } = useCreditsContext(); - - useEffectOnce(() => { - trackTiming("CreditUsage"); - }); - - return ( - - - - - {freeAndPaidUsageByTimeChunk.length > 0 ? 
( - <> - - - - - - - - - - - - - - - - - - ) : ( - - )} - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/EmptyState.module.scss b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/EmptyState.module.scss deleted file mode 100644 index 4160afe0746..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/EmptyState.module.scss +++ /dev/null @@ -1,4 +0,0 @@ -.container { - flex: 1; - margin-top: 80px; -} diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/EmptyState.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/EmptyState.tsx deleted file mode 100644 index 726bd54e1fa..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/EmptyState.tsx +++ /dev/null @@ -1,16 +0,0 @@ -import { FormattedMessage } from "react-intl"; - -import { FlexContainer } from "components/ui/Flex"; -import { Text } from "components/ui/Text"; - -import styles from "./EmptyState.module.scss"; - -export const EmptyState: React.FC = () => { - return ( - - - - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/PbaBillingBanner.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/PbaBillingBanner.tsx deleted file mode 100644 index 33cdbad0681..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/PbaBillingBanner.tsx +++ /dev/null @@ -1,28 +0,0 @@ -import { FormattedMessage } from "react-intl"; - -import { ExternalLink } from "components/ui/Link"; -import { Message } from "components/ui/Message"; - -interface PbaBillingBannerProps { - organizationName: string; -} - -export const PbaBillingBanner: React.FC = ({ organizationName }) => { - return ( - ( - - {node} - - ), - }} - /> - } - /> - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/RemainingCredits.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/RemainingCredits.tsx deleted file mode 100644 index 90b5360b4c0..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/RemainingCredits.tsx +++ /dev/null @@ -1,128 +0,0 @@ -import React, { useEffect, useRef, useState } from "react"; -import { FormattedMessage, FormattedNumber, useIntl } from "react-intl"; -import { useSearchParams } from "react-router-dom"; -import { useEffectOnce } from "react-use"; - -import { Button } from "components/ui/Button"; -import { Card } from "components/ui/Card"; -import { FlexContainer, FlexItem } from "components/ui/Flex"; -import { ExternalLink } from "components/ui/Link"; -import { Text } from "components/ui/Text"; - -import { useGetCloudWorkspace, useInvalidateCloudWorkspace } from "core/api/cloud"; -import { CloudWorkspaceRead } from "core/api/types/CloudApi"; -import { useAuthService } from "core/services/auth"; -import { links } from "core/utils/links"; -import { useIntent } from "core/utils/rbac"; -import { useModalService } from "hooks/services/Modal"; -import { useCurrentWorkspace } from "hooks/services/useWorkspace"; - -import { BillingBanners } from "./BillingBanners"; -import { CheckoutCreditsModal } from "./CheckoutCreditsModal"; - -export const STRIPE_SUCCESS_QUERY = "stripeCheckoutSuccess"; - -/** - * Checks whether the given cloud workspace had a recent increase in credits. 
- */ -function hasRecentCreditIncrease(cloudWorkspace: CloudWorkspaceRead): boolean { - const lastIncrement = cloudWorkspace.lastCreditPurchaseIncrementTimestamp; - return lastIncrement ? Date.now() - lastIncrement < 30000 : false; -} - -export const RemainingCredits: React.FC = () => { - const { formatMessage } = useIntl(); - const retryIntervalId = useRef(); - const currentWorkspace = useCurrentWorkspace(); - const cloudWorkspace = useGetCloudWorkspace(currentWorkspace.workspaceId); - const [searchParams, setSearchParams] = useSearchParams(); - const invalidateCloudWorkspace = useInvalidateCloudWorkspace(currentWorkspace.workspaceId); - const [isWaitingForCredits, setIsWaitingForCredits] = useState(false); - const { openModal } = useModalService(); - const canBuyCredits = useIntent("BuyCredits", { workspaceId: currentWorkspace.workspaceId }); - - const { emailVerified } = useAuthService(); - - useEffectOnce(() => { - // If we are coming back from a successful stripe checkout ... - if (searchParams.has(STRIPE_SUCCESS_QUERY)) { - // Remove the stripe parameter from the URL - setSearchParams({}, { replace: true }); - // If the workspace doesn't have a recent increase in credits our server has not yet - // received the Stripe callback or updated the workspace information. We're going to - // switch into a loading mode and reload the workspace every 3s from now on until - // the workspace has received the credit update (see useEffect below) - if (!hasRecentCreditIncrease(cloudWorkspace)) { - setIsWaitingForCredits(true); - retryIntervalId.current = window.setInterval(() => { - invalidateCloudWorkspace(); - }, 3000); - } - } - - return () => clearInterval(retryIntervalId.current); - }); - - useEffect(() => { - // Whenever the `cloudWorkspace` changes and now has a recent credit increment, while we're still waiting - // for new credits to come in (i.e. the retryIntervalId is still set), we know that we now - // handled the actual credit purchase and can clean the interval and loading state. 
- if (retryIntervalId.current && hasRecentCreditIncrease(cloudWorkspace)) { - clearInterval(retryIntervalId.current); - retryIntervalId.current = undefined; - setIsWaitingForCredits(false); - } - }, [cloudWorkspace]); - - const showCreditsModal = async () => { - await openModal({ - title: formatMessage({ id: "credits.checkoutModalTitle" }), - size: "md", - content: CheckoutCreditsModal, - }); - }; - - return ( - - - - - - - - - - - - - - - - - - - - - - - - - - ); -}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateAvailableSourcesAndDestinations.test.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateAvailableSourcesAndDestinations.test.tsx deleted file mode 100644 index 5260a14a13b..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateAvailableSourcesAndDestinations.test.tsx +++ /dev/null @@ -1,18 +0,0 @@ -import { mockConsumptionSixMonths } from "test-utils/mock-data/mockBillingData"; - -import { calculateAvailableSourcesAndDestinations } from "./calculateAvailableSourcesAndDestinations"; - -describe("calculateAvailableSourcesAndDestinations", () => { - it("calculates a complete set of available sources and destinations", () => { - const result = calculateAvailableSourcesAndDestinations(mockConsumptionSixMonths); - expect(result.destinations).toHaveLength(2); - expect(result.sources).toHaveLength(3); - - expect(result.destinations[0].connectedSources).toHaveLength(2); - expect(result.destinations[1].connectedSources).toHaveLength(1); - - expect(result.sources[0].connectedDestinations).toHaveLength(1); - expect(result.sources[1].connectedDestinations).toHaveLength(1); - expect(result.sources[2].connectedDestinations).toHaveLength(1); - }); -}); diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateAvailableSourcesAndDestinations.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateAvailableSourcesAndDestinations.tsx deleted file mode 100644 index d4921a6f92f..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateAvailableSourcesAndDestinations.tsx +++ /dev/null @@ -1,64 +0,0 @@ -import { ConsumptionRead } from "core/api/types/CloudApi"; - -import { AvailableDestination, AvailableSource } from "./CreditsUsageContext"; - -export const calculateAvailableSourcesAndDestinations = (rawConsumptionData: ConsumptionRead[]) => { - const sourceAndDestinationMaps = rawConsumptionData.reduce( - (allSourcesAndDestinations, currentConsumptionItem) => { - // create set of sources, including merging a set of the destinations each is connected to - if (!allSourcesAndDestinations.sources[currentConsumptionItem.connection.sourceId]) { - allSourcesAndDestinations.sources[currentConsumptionItem.connection.sourceId] = { - name: currentConsumptionItem.connection.sourceConnectionName, - id: currentConsumptionItem.connection.sourceId, - icon: currentConsumptionItem.connection.sourceIcon, - supportLevel: currentConsumptionItem.connection.sourceSupportLevel, - custom: currentConsumptionItem.connection.sourceCustom, - connectedDestinations: [currentConsumptionItem.connection.destinationId], - }; - } else { - allSourcesAndDestinations.sources[currentConsumptionItem.connection.sourceId] = { - ...allSourcesAndDestinations.sources[currentConsumptionItem.connection.sourceId], - connectedDestinations: Array.from( - new Set([ - 
...allSourcesAndDestinations.sources[currentConsumptionItem.connection.sourceId].connectedDestinations, - currentConsumptionItem.connection.destinationId, - ]) - ), - }; - } - // create set of destinations, including merging a set of the sources each is connected to - if (!allSourcesAndDestinations.destinations[currentConsumptionItem.connection.destinationId]) { - allSourcesAndDestinations.destinations[currentConsumptionItem.connection.destinationId] = { - name: currentConsumptionItem.connection.destinationConnectionName, - id: currentConsumptionItem.connection.destinationId, - icon: currentConsumptionItem.connection.destinationIcon, - supportLevel: currentConsumptionItem.connection.destinationSupportLevel, - custom: currentConsumptionItem.connection.destinationCustom, - connectedSources: [currentConsumptionItem.connection.sourceId], - }; - } else { - allSourcesAndDestinations.destinations[currentConsumptionItem.connection.destinationId] = { - ...allSourcesAndDestinations.destinations[currentConsumptionItem.connection.destinationId], - connectedSources: Array.from( - new Set([ - ...allSourcesAndDestinations.destinations[currentConsumptionItem.connection.destinationId] - .connectedSources, - currentConsumptionItem.connection.sourceId, - ]) - ), - }; - } - - return allSourcesAndDestinations; - }, - { - sources: {} as Record, - destinations: {} as Record, - } - ); - - return { - sources: Object.values(sourceAndDestinationMaps.sources), - destinations: Object.values(sourceAndDestinationMaps.destinations), - }; -}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.test.tsx b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.test.tsx deleted file mode 100644 index 30520eb8198..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.test.tsx +++ /dev/null @@ -1,112 +0,0 @@ -import { - mockConsumptionSixMonths, - mockConsumptionThirtyDay, - mockConsumptionYear, -} from "test-utils/mock-data/mockBillingData"; -import { mockWorkspaceUsage } from "test-utils/mock-data/mockWorkspaceUsage"; - -import { ConsumptionTimeWindow } from "core/api/types/CloudApi"; - -import { - calculateFreeAndPaidUsageByConnection, - calculateFreeAndPaidUsageByTimeChunk, - getWorkspaceUsageByConnection, -} from "./calculateUsageDataObjects"; - -describe("calculateUsageDataObjects", () => { - describe(`${calculateFreeAndPaidUsageByTimeChunk.name}`, () => { - describe("thirty day lookback", () => { - it("should calculate the correct usage with an empty set of filteredConsumptionData", () => { - const result = calculateFreeAndPaidUsageByTimeChunk([], ConsumptionTimeWindow.lastMonth); - expect(result).toHaveLength(0); - }); - it("should calculate the correct usage with a set of filteredConsumptionData", () => { - const result = calculateFreeAndPaidUsageByTimeChunk(mockConsumptionThirtyDay, ConsumptionTimeWindow.lastMonth); - expect(result).toHaveLength(30); - expect(result[0].freeUsage).toEqual(0); - expect(result[0].billedCost).toEqual(0); - expect(result[29].freeUsage).toEqual(316); - expect(result[29].billedCost).toEqual(426); - }); - }); - describe("six month lookback", () => { - it("should calculate the correct usage with an empty set of filteredConsumptionData", () => { - const result = calculateFreeAndPaidUsageByTimeChunk([], ConsumptionTimeWindow.lastMonth); - expect(result).toHaveLength(0); - }); - it("should calculate the correct usage with a set of 
filteredConsumptionData", () => { - const result = calculateFreeAndPaidUsageByTimeChunk( - mockConsumptionSixMonths, - ConsumptionTimeWindow.lastSixMonths - ); - expect(result).toHaveLength(26); - expect(result[0].freeUsage).toEqual(0); - expect(result[0].billedCost).toEqual(0); - // three weeks ago - expect(result[22].freeUsage).toEqual(32); - expect(result[22].billedCost).toEqual(52); - // last week - expect(result[24].freeUsage).toEqual(401); - expect(result[24].billedCost).toEqual(601); - }); - }); - describe("year lookback", () => { - it("should calculate the correct usage with an empty set of filteredConsumptionData", () => { - const result = calculateFreeAndPaidUsageByTimeChunk([], ConsumptionTimeWindow.lastMonth); - expect(result).toHaveLength(0); - }); - it("should calculate the correct usage with a set of filteredConsumptionData", () => { - const result = calculateFreeAndPaidUsageByTimeChunk(mockConsumptionYear, ConsumptionTimeWindow.lastYear); - - expect(result).toHaveLength(12); - expect(result[0].freeUsage).toEqual(0); - expect(result[0].billedCost).toEqual(0); - // three months ago - expect(result[7].freeUsage).toEqual(32); - expect(result[7].billedCost).toEqual(52); - // last month - expect(result[11].freeUsage).toEqual(401); - expect(result[11].billedCost).toEqual(601); - }); - }); - }); - - describe(`${calculateFreeAndPaidUsageByConnection.name}`, () => { - it("should calculate the correct usage with an empty set of filteredConsumptionData", () => { - const result = calculateFreeAndPaidUsageByConnection([], ConsumptionTimeWindow.lastMonth); - expect(result).toHaveLength(0); - }); - it("should calculate the correct usage with a set of filteredConsumptionData", () => { - const result = calculateFreeAndPaidUsageByConnection(mockConsumptionThirtyDay, ConsumptionTimeWindow.lastMonth); - expect(result).toHaveLength(3); - - expect(result[0].connection.connectionName).toEqual("my connection"); - expect(result[0].usage).toHaveLength(30); - expect(result[0].totalUsage).toEqual(1002); - - expect(result[1].connection.connectionName).toEqual("my second connection"); - expect(result[1].usage).toHaveLength(30); - expect(result[1].totalUsage).toEqual(60); - - expect(result[2].connection.connectionName).toEqual("my third connection"); - expect(result[2].usage).toHaveLength(30); - expect(result[2].totalUsage).toEqual(24); - }); - }); -}); - -describe(`${getWorkspaceUsageByConnection.name}`, () => { - it("should calculate the correct usage with an empty set of filteredConsumptionData", () => { - const result = getWorkspaceUsageByConnection([], ConsumptionTimeWindow.lastMonth); - expect(result).toHaveLength(0); - }); - - it("should calculate the correct usage with internal, free and regular usage", () => { - const result = getWorkspaceUsageByConnection(mockWorkspaceUsage.data, ConsumptionTimeWindow.lastMonth); - expect(result).toHaveLength(2); - expect(result[0].totalFreeUsage).toEqual(10); - expect(result[0].totalInternalUsage).toEqual(10); - expect(result[0].totalBilledCost).toEqual(53.5); - expect(result[0].totalUsage).toEqual(73.5); - }); -}); diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/filesIcon.svg b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/filesIcon.svg deleted file mode 100644 index 32ab9c2b80c..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/filesIcon.svg +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/index.tsx 
b/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/index.tsx deleted file mode 100644 index a6c2f86eea7..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -import { BillingPage } from "./BillingPage"; - -export default BillingPage; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/AccountBalance/AccountBalance.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/AccountBalance/AccountBalance.tsx index 38948fb3442..625e9b83e68 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/AccountBalance/AccountBalance.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/AccountBalance/AccountBalance.tsx @@ -12,23 +12,22 @@ import { LoadingSkeleton } from "components/ui/LoadingSkeleton"; import { Text } from "components/ui/Text"; import { Tooltip } from "components/ui/Tooltip"; -import { useCurrentWorkspace, useGetOrganizationBillingBalance } from "core/api"; +import { useCurrentWorkspace, useGetOrganizationSubscriptionInfo } from "core/api"; import { CreditBlockRead } from "core/api/types/AirbyteClient"; import { useFormatCredits } from "core/utils/numberHelper"; export const AccountBalance = () => { const { organizationId } = useCurrentWorkspace(); const { - data: balance, + data: subscriptionInfo, isLoading: balanceIsLoading, isError: balanceError, - } = useGetOrganizationBillingBalance(organizationId); + } = useGetOrganizationSubscriptionInfo(organizationId); const { formatCredits } = useFormatCredits(); - const hasPositiveCreditBalance = !!balance?.credits?.balance && balance.credits.balance > 0; - const showCreditBalance = hasPositiveCreditBalance || balance?.planType === "prepaid"; + const hasPositiveCreditBalance = !!subscriptionInfo?.credits?.balance && subscriptionInfo.credits.balance > 0; - if (balance?.hidden) { + if (subscriptionInfo?.balanceHidden) { return null; } @@ -44,26 +43,28 @@ export const AccountBalance = () => { )} - {balance && ( + {subscriptionInfo && ( - {showCreditBalance && ( + {hasPositiveCreditBalance && ( - {!!balance?.credits?.blocks?.length && } + {!!subscriptionInfo?.credits?.blocks?.length && ( + + )} )} - {balance.planType === "in_arrears" && balance.upcomingInvoice && ( + {subscriptionInfo.upcomingInvoice && ( <> @@ -72,11 +73,11 @@ export const AccountBalance = () => { { - + diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingBanners.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingBanners.tsx index 3f7fc620266..4f38a173b71 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingBanners.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingBanners.tsx @@ -1,37 +1,11 @@ import React from "react"; -import { useIntl } from "react-intl"; -import { Link } from "components/ui/Link"; import { Message } from "components/ui/Message"; -import { useExperiment } from "hooks/services/Experiment"; import { useBillingStatusBanner } from "packages/cloud/area/billing/utils/useBillingStatusBanner"; export const BillingBanners: React.FC = () => { - const { formatMessage } = useIntl(); const billingBanner = useBillingStatusBanner("billing_page"); - const isAutoRechargeEnabled = useExperiment("billing.autoRecharge"); - return ( - <> - {billingBanner && } - {isAutoRechargeEnabled && ( - ( - - {node} - - ), - } - )} - /> - )} - - ); + return 
<>{billingBanner && }; }; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingInformation/BillingInformation.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingInformation/BillingInformation.tsx index 779f6dd7aff..73172c21e2d 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingInformation/BillingInformation.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingInformation/BillingInformation.tsx @@ -11,7 +11,7 @@ import { Text } from "components/ui/Text"; import { useCurrentWorkspace, useGetPaymentInformation } from "core/api"; import styles from "./BillingInformation.module.scss"; -import { useRedirectToCustomerPortal } from "../../useRedirectToCustomerPortal"; +import { useRedirectToCustomerPortal } from "../../../../area/billing/utils/useRedirectToCustomerPortal"; import { UpdateButton } from "../UpdateButton"; type CountryCodes = keyof typeof Flags; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Invoices/Invoices.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Invoices/Invoices.tsx index 17121982573..dd22c3a380c 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Invoices/Invoices.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Invoices/Invoices.tsx @@ -9,7 +9,7 @@ import { Heading } from "components/ui/Heading"; import { useCurrentWorkspace, useGetInvoices } from "core/api"; import { InvoiceGrid, InvoiceGridLoadingSkeleton } from "./InvoiceGrid"; -import { useRedirectToCustomerPortal } from "../../useRedirectToCustomerPortal"; +import { useRedirectToCustomerPortal } from "../../../../area/billing/utils/useRedirectToCustomerPortal"; import { UpdateButton } from "../UpdateButton"; export const Invoices = () => { diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/OrganizationBillingPage.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/OrganizationBillingPage.tsx index 97e1362467a..72186e0e016 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/OrganizationBillingPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/OrganizationBillingPage.tsx @@ -12,7 +12,13 @@ import { ExternalLink } from "components/ui/Link"; import { Message } from "components/ui/Message"; import { Text } from "components/ui/Text"; -import { useCurrentOrganizationInfo, useCurrentWorkspace, useGetOrganizationBillingBalance } from "core/api"; +import { + HttpProblem, + useCurrentOrganizationInfo, + useCurrentWorkspace, + useGetOrganizationSubscriptionInfo, +} from "core/api"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { links } from "core/utils/links"; import { useFormatCredits } from "core/utils/numberHelper"; @@ -21,16 +27,23 @@ import { BillingBanners } from "./BillingBanners"; import { BillingInformation } from "./BillingInformation"; import { Invoices } from "./Invoices"; import { PaymentMethod } from "./PaymentMethod"; -import { useRedirectToCustomerPortal } from "../useRedirectToCustomerPortal"; +import { Subscription } from "./Subscription"; +import { useRedirectToCustomerPortal } from "../../../area/billing/utils/useRedirectToCustomerPortal"; export const OrganizationBillingPage: React.FC = () => { + useTrackPage(PageTrackingCodes.SETTINGS_ORGANIZATION_BILLING); + 
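Note: the OrganizationBillingPage hunk below treats a typed "no active subscription" problem as an expected state rather than a hard error, and hides the balance tile either in that case or when the subscription marks the balance as hidden. A compact restatement of that derivation, with the surrounding values stubbed out as assumptions for the sketch:

declare const isLoading: boolean;
declare const error: unknown;
declare const subscriptionInfo: { balanceHidden?: boolean } | undefined;
declare const HttpProblem: { isType: (err: unknown, type: string) => boolean };

// Mirrors the logic added in the hunk below; only the declarations above are invented for the sketch.
const noSubscriptionExists = !isLoading && HttpProblem.isType(error, "error:billing/no-active-subscription");
const hideAccountBalance = noSubscriptionExists || subscriptionInfo?.balanceHidden === true;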
const { formatMessage } = useIntl(); const { organizationId } = useCurrentWorkspace(); const { billing } = useCurrentOrganizationInfo(); const { goToCustomerPortal, redirecting } = useRedirectToCustomerPortal("portal"); const { formatCredits } = useFormatCredits(); - const { data: balance } = useGetOrganizationBillingBalance(organizationId); + const { data: subscriptionInfo, isLoading, error } = useGetOrganizationSubscriptionInfo(organizationId); + + const noSubscriptionExists = !isLoading && HttpProblem.isType(error, "error:billing/no-active-subscription"); + + const hideAccountBalance = noSubscriptionExists || subscriptionInfo?.balanceHidden; return ( @@ -58,7 +71,9 @@ export const OrganizationBillingPage: React.FC = () => { - + {!noSubscriptionExists && } + + {!hideAccountBalance && } @@ -75,13 +90,13 @@ export const OrganizationBillingPage: React.FC = () => { ) : ( - {!!balance?.credits?.balance && balance?.credits?.balance > 0 && ( + {!!subscriptionInfo?.credits?.balance && subscriptionInfo?.credits?.balance > 0 && ( } diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/PaymentMethod/PaymentMethod.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/PaymentMethod/PaymentMethod.tsx index 8ec12ab7025..29310f94cba 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/PaymentMethod/PaymentMethod.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/PaymentMethod/PaymentMethod.tsx @@ -13,7 +13,7 @@ import { useCurrentWorkspace, useGetPaymentInformation } from "core/api"; import { PaymentMethodRead } from "core/api/types/AirbyteClient"; import styles from "./PaymentMethod.module.scss"; -import { useRedirectToCustomerPortal } from "../../useRedirectToCustomerPortal"; +import { useRedirectToCustomerPortal } from "../../../../area/billing/utils/useRedirectToCustomerPortal"; import { CreditCardLogo } from "../CreditCardLogo"; import LinkLogo from "../logos/link.svg?react"; import { UpdateButton } from "../UpdateButton"; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Subscription/Subscription.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Subscription/Subscription.tsx new file mode 100644 index 00000000000..5e2175d663c --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Subscription/Subscription.tsx @@ -0,0 +1,49 @@ +import { FormattedDate, FormattedMessage } from "react-intl"; + +import { BorderedTile } from "components/ui/BorderedTiles"; +import { Box } from "components/ui/Box"; +import { DataLoadingError } from "components/ui/DataLoadingError"; +import { FlexContainer, FlexItem } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; +import { LoadingSkeleton } from "components/ui/LoadingSkeleton"; +import { Text } from "components/ui/Text"; + +import { useCurrentWorkspace, useGetOrganizationSubscriptionInfo } from "core/api"; + +export const Subscription: React.FC = () => { + const { organizationId } = useCurrentWorkspace(); + const { data: subscription, isLoading, isError } = useGetOrganizationSubscriptionInfo(organizationId); + return ( + + + + + + {isLoading && } + {subscription && ( + + {subscription.name} + {subscription.cancellationDate && ( + + + + + + + + + + + + )} + + )} + {isError && ( + + + + )} + + + ); +}; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Subscription/index.ts 
b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Subscription/index.ts new file mode 100644 index 00000000000..dfe3ce90bcc --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/Subscription/index.ts @@ -0,0 +1 @@ +export { Subscription } from "./Subscription"; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/index.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/index.tsx index e63f4e9f188..e78df507c90 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/index.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/index.tsx @@ -1 +1 @@ -export * from "./OrganizationBillingPage"; +export { OrganizationBillingPage as default } from "./OrganizationBillingPage"; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationUsagePage/OrganizationUsagePage.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationUsagePage/OrganizationUsagePage.tsx index cd9e1301f45..f74364bfe61 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationUsagePage/OrganizationUsagePage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationUsagePage/OrganizationUsagePage.tsx @@ -7,6 +7,7 @@ import { ListBox } from "components/ui/ListBox"; import { Text } from "components/ui/Text"; import { ConsumptionTimeWindow } from "core/api/types/AirbyteClient"; +import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { links } from "core/utils/links"; import { UsagePerDayGraph } from "packages/cloud/area/billing/components/UsagePerDayGraph"; @@ -15,6 +16,8 @@ import styles from "./OrganizationUsagePage.module.scss"; import { UsageByWorkspaceTable } from "./UsageByWorkspaceTable"; export const OrganizationUsagePage: React.FC = () => { + useTrackPage(PageTrackingCodes.SETTINGS_ORGANIZATION_USAGE); + return ( diff --git a/airbyte-webapp/src/packages/cloud/views/billing/index.tsx b/airbyte-webapp/src/packages/cloud/views/billing/index.tsx deleted file mode 100644 index 928d186ac44..00000000000 --- a/airbyte-webapp/src/packages/cloud/views/billing/index.tsx +++ /dev/null @@ -1,3 +0,0 @@ -import BillingPage from "./BillingPage"; - -export default BillingPage; diff --git a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx index 0a5e5c7833a..d5a7011da51 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx +++ b/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/CloudMainView.tsx @@ -7,7 +7,7 @@ import { FlexContainer } from "components/ui/Flex"; import { useListCloudWorkspacesInfinite } from "core/api/cloud"; import { DefaultErrorBoundary } from "core/errors"; -import { StatusBanner } from "packages/cloud/area/billing/components/StatusBanner/StatusBanner"; +import { StatusBanner } from "packages/cloud/area/billing/components/StatusBanner"; import { SideBar } from "views/layout/SideBar/SideBar"; import { CloudHelpDropdown } from "./CloudHelpDropdown"; diff --git a/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx b/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx index 1c9d68779f2..1c9195f17e0 100644 --- a/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/settings/CloudSettingsPage.tsx @@ -16,7 +16,6 @@ import { 
FeatureItem, useFeature } from "core/services/features"; import { isOsanoActive, showOsanoDrawer } from "core/utils/dataPrivacy"; import { Intent, useIntent, useGeneratedIntent } from "core/utils/rbac"; import { useExperiment } from "hooks/services/Experiment"; -import { useShowBillingPageV2 } from "packages/cloud/area/billing/utils/useShowBillingPage"; import { CloudSettingsRoutePaths } from "./routePaths"; @@ -27,7 +26,6 @@ export const CloudSettingsPage: React.FC = () => { const workspace = useCurrentWorkspace(); const canViewOrgSettings = useIntent("ViewOrganizationSettings", { organizationId: workspace.organizationId }); const showAdvancedSettings = useExperiment("settings.showAdvancedSettings"); - const showBillingPageV2 = useShowBillingPageV2(); const canManageOrganizationBilling = useGeneratedIntent(Intent.ManageOrganizationBilling); const canViewOrganizationUsage = useGeneratedIntent(Intent.ViewOrganizationUsage); @@ -96,13 +94,11 @@ export const CloudSettingsPage: React.FC = () => { to={CloudSettingsRoutePaths.Notifications} /> - {showBillingPageV2 && ( - - )} + {canViewOrgSettings && ( @@ -116,14 +112,14 @@ export const CloudSettingsPage: React.FC = () => { name={formatMessage({ id: "settings.members" })} to={CloudSettingsRoutePaths.OrganizationMembers} /> - {showBillingPageV2 && canManageOrganizationBilling && ( + {canManageOrganizationBilling && ( )} - {showBillingPageV2 && canViewOrganizationUsage && ( + {canViewOrganizationUsage && ( { + useTrackPage(PageTrackingCodes.SETTINGS_WORKSPACE_USAGE); return ( diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/ConnectorOptionLabel.module.scss b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/ConnectorOptionLabel.module.scss similarity index 100% rename from airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/ConnectorOptionLabel.module.scss rename to airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/ConnectorOptionLabel.module.scss diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/ConnectorOptionLabel.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/ConnectorOptionLabel.tsx similarity index 100% rename from airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/ConnectorOptionLabel.tsx rename to airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/ConnectorOptionLabel.tsx diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageContext.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/CreditsUsageContext.tsx similarity index 57% rename from airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageContext.tsx rename to airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/CreditsUsageContext.tsx index f552c535322..332f5dbd312 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageContext.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/CreditsUsageContext.tsx @@ -1,19 +1,14 @@ -import dayjs from "dayjs"; -import { createContext, useContext, useMemo, useState } from "react"; +import { createContext, useContext, useMemo } from "react"; import { Option } from "components/ui/ListBox"; -import { useCurrentWorkspace, useFilters } from "core/api"; -import { useGetCloudWorkspaceUsage, useGetWorkspaceUsage 
} from "core/api/cloud"; -import { DestinationId, SourceId, SupportLevel } from "core/api/types/AirbyteClient"; -import { ConsumptionTimeWindow } from "core/api/types/CloudApi"; +import { useFilters } from "core/api"; +import { useGetWorkspaceUsage } from "core/api/cloud"; +import { ConsumptionTimeWindow, DestinationId, SourceId, SupportLevel } from "core/api/types/AirbyteClient"; import { UsagePerTimeChunk } from "packages/cloud/area/billing/utils/chartUtils"; -import { calculateAvailableSourcesAndDestinations } from "./calculateAvailableSourcesAndDestinations"; import { ConnectionFreeAndPaidUsage, - calculateFreeAndPaidUsageByTimeChunk, - calculateFreeAndPaidUsageByConnection, getWorkspaceUsageByTimeChunk, getWorkspaceUsageByConnection, } from "./calculateUsageDataObjects"; @@ -68,103 +63,6 @@ interface FilterValues { selectedDestination: DestinationId | null; } -export const CreditsUsageContextProvider: React.FC> = ({ children }) => { - const [filters, setFilterValue] = useFilters({ - selectedTimeWindow: ConsumptionTimeWindow.lastMonth, - selectedSource: null, - selectedDestination: null, - }); - const { selectedTimeWindow, selectedSource, selectedDestination } = filters; - - const [hasFreeUsage, setHasFreeUsage] = useState(false); - - const { workspaceId } = useCurrentWorkspace(); - const data = useGetCloudWorkspaceUsage(workspaceId, filters.selectedTimeWindow); - - const { consumptionPerConnectionPerTimeframe, timeWindow } = data; - - const rawConsumptionData = useMemo(() => { - return consumptionPerConnectionPerTimeframe.map((consumption) => { - if (consumption.freeUsage > 0) { - setHasFreeUsage(true); - } - - return { - ...consumption, - startTime: dayjs(consumption.startTime).format("YYYY-MM-DD"), - endTime: dayjs(consumption.endTime).format("YYYY-MM-DD"), - }; - }); - }, [consumptionPerConnectionPerTimeframe]); - - const availableSourcesAndDestinations = useMemo( - () => calculateAvailableSourcesAndDestinations(rawConsumptionData), - [rawConsumptionData] - ); - - const filteredConsumptionData = useMemo(() => { - if (selectedSource && selectedDestination) { - return rawConsumptionData.filter( - (consumption) => - consumption.connection.sourceId === selectedSource && - consumption.connection.destinationId === selectedDestination - ); - } else if (selectedSource) { - return rawConsumptionData.filter((consumption) => consumption.connection.sourceId === selectedSource); - } else if (selectedDestination) { - return rawConsumptionData.filter((consumption) => consumption.connection.destinationId === selectedDestination); - } - - return rawConsumptionData; - }, [rawConsumptionData, selectedDestination, selectedSource]); - - const sourceOptions = useMemo( - () => - availableSourcesAndDestinations.sources - .filter((source) => (selectedDestination ? source.connectedDestinations.includes(selectedDestination) : true)) - .map((source) => ({ - label: , - value: source.id, - })), - [availableSourcesAndDestinations.sources, selectedDestination] - ); - - const destinationOptions = useMemo( - () => - availableSourcesAndDestinations.destinations - .filter((destination) => (selectedSource ? 
destination.connectedSources.includes(selectedSource) : true)) - .map((destination) => ({ - label: , - value: destination.id, - })), - [availableSourcesAndDestinations.destinations, selectedSource] - ); - - return ( - setFilterValue("selectedSource", selectedSource), - selectedDestination, - setSelectedDestination: (selectedDestination: DestinationId | null) => - setFilterValue("selectedDestination", selectedDestination), - selectedTimeWindow, - setSelectedTimeWindow: (selectedTimeWindow: ConsumptionTimeWindow) => - setFilterValue("selectedTimeWindow", selectedTimeWindow), - hasFreeUsage, - // There is no internal usage in the old billing page, only the new workspace usage page - hasInternalUsage: false, - }} - > - {children} - - ); -}; - export const WorkspaceCreditUsageContextProvider: React.FC> = ({ children }) => { const [filters, setFilterValue] = useFilters({ selectedTimeWindow: ConsumptionTimeWindow.lastMonth, diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageFilters.module.scss b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/CreditsUsageFilters.module.scss similarity index 100% rename from airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageFilters.module.scss rename to airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/CreditsUsageFilters.module.scss diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageFilters.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/CreditsUsageFilters.tsx similarity index 96% rename from airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageFilters.tsx rename to airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/CreditsUsageFilters.tsx index 136ef7e057a..68919fbf1bf 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/CreditsUsageFilters.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/CreditsUsageFilters.tsx @@ -7,8 +7,7 @@ import { Icon } from "components/ui/Icon"; import { ListBox, ListBoxControlButtonProps } from "components/ui/ListBox"; import { Text } from "components/ui/Text"; -import { DestinationId, SourceId } from "core/api/types/AirbyteClient"; -import { ConsumptionTimeWindow } from "core/api/types/CloudApi"; +import { ConsumptionTimeWindow, DestinationId, SourceId } from "core/api/types/AirbyteClient"; import { useCreditsContext } from "./CreditsUsageContext"; import styles from "./CreditsUsageFilters.module.scss"; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/UsagePerConnectionTable.module.scss b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/UsagePerConnectionTable.module.scss similarity index 100% rename from airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/UsagePerConnectionTable.module.scss rename to airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/UsagePerConnectionTable.module.scss diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/UsagePerConnectionTable.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/UsagePerConnectionTable.tsx similarity index 99% rename from airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/UsagePerConnectionTable.tsx rename to 
airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/UsagePerConnectionTable.tsx index e9af7052c42..51ac1694cf3 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/UsagePerConnectionTable.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/UsagePerConnectionTable.tsx @@ -148,9 +148,9 @@ export const UsagePerConnectionTable: React.FC = ( ) : ( )} diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/calculateUsageDataObjects.test.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/calculateUsageDataObjects.test.tsx new file mode 100644 index 00000000000..56e4be6868a --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/calculateUsageDataObjects.test.tsx @@ -0,0 +1,21 @@ +import { mockWorkspaceUsage } from "test-utils/mock-data/mockWorkspaceUsage"; + +import { ConsumptionTimeWindow } from "core/api/types/AirbyteClient"; + +import { getWorkspaceUsageByConnection } from "./calculateUsageDataObjects"; + +describe(`${getWorkspaceUsageByConnection.name}`, () => { + it("should calculate the correct usage with an empty set of filteredConsumptionData", () => { + const result = getWorkspaceUsageByConnection([], ConsumptionTimeWindow.lastMonth); + expect(result).toHaveLength(0); + }); + + it("should calculate the correct usage with internal, free and regular usage", () => { + const result = getWorkspaceUsageByConnection(mockWorkspaceUsage.data, ConsumptionTimeWindow.lastMonth); + expect(result).toHaveLength(2); + expect(result[0].totalFreeUsage).toEqual(10); + expect(result[0].totalInternalUsage).toEqual(10); + expect(result[0].totalBilledCost).toEqual(53.5); + expect(result[0].totalUsage).toEqual(73.5); + }); +}); diff --git a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/calculateUsageDataObjects.tsx similarity index 56% rename from airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.tsx rename to airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/calculateUsageDataObjects.tsx index 4d728e98050..161a25b0033 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/BillingPage/components/calculateUsageDataObjects.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/components/calculateUsageDataObjects.tsx @@ -1,11 +1,43 @@ -import dayjs, { ConfigType } from "dayjs"; - -import { ConnectionUsageRead, TimeframeUsage } from "core/api/types/AirbyteClient"; -import { ConnectionProto, ConsumptionRead, ConsumptionTimeWindow } from "core/api/types/CloudApi"; +import dayjs from "dayjs"; + +import { + ConnectionSchedule, + ConnectionScheduleType, + ConnectionStatus, + ConnectionUsageRead, + ConsumptionTimeWindow, + ReleaseStage, + SupportLevel, + TimeframeUsage, +} from "core/api/types/AirbyteClient"; import { generateArrayForTimeWindow, UsagePerTimeChunk } from "packages/cloud/area/billing/utils/chartUtils"; +interface ConnectionSummary { + connectionId: string; + connectionName: string; + connectionScheduleType?: ConnectionScheduleType; + connectionSchedule?: ConnectionSchedule; + status: ConnectionStatus; + sourceId: string; + sourceConnectionName: string; + sourceCustom: boolean; + sourceIcon: string; + sourceDefinitionId: string; + 
sourceDefinitionName: string;
+  sourceReleaseStage: ReleaseStage;
+  sourceSupportLevel: SupportLevel;
+  destinationId: string;
+  destinationConnectionName: string;
+  destinationCustom: boolean;
+  destinationIcon: string;
+  destinationDefinitionId: string;
+  destinationDefinitionName: string;
+  destinationReleaseStage: ReleaseStage;
+  destinationSupportLevel: SupportLevel;
+}
+
 export interface ConnectionFreeAndPaidUsage {
-  connection: ConnectionProto;
+  connection: ConnectionSummary;
   usage: UsagePerTimeChunk;
   totalFreeUsage: number;
   totalBilledCost: number;
@@ -13,89 +45,6 @@ export interface ConnectionFreeAndPaidUsage {
   totalUsage: number;
 }
 
-/**
- * if there is no consumption for a given time chunk (in this case, day) we will not receive a data point
- * however, we still want to include that day on our graph, so we create an array with an entry for each time window
- * then backfill it with the data from the API.
- */
-
-const mergeUsageData = (usageArray: UsagePerTimeChunk, consumption: ConsumptionRead) => {
-  const timeframeItemIndex = usageArray.findIndex((item) => {
-    // first two params are the start and end of the timeframe
-    // final param makes the compare inclusive
-    const isBetween = dayjs(consumption.startTime as ConfigType).isBetween(
-      dayjs(item.startTime),
-      dayjs(item.endTime),
-      "day",
-      "[)"
-    );
-
-    return isBetween;
-  });
-
-  if (timeframeItemIndex !== -1) {
-    const usage = usageArray[timeframeItemIndex];
-    usage.billedCost += consumption.billedCost;
-    usage.freeUsage += consumption.freeUsage;
-  }
-};
-export const calculateFreeAndPaidUsageByConnection = (
-  filteredConsumptionData: ConsumptionRead[],
-  timeWindow: ConsumptionTimeWindow
-) => {
-  if (filteredConsumptionData.length === 0) {
-    return [];
-  }
-  const usagePerConnection = filteredConsumptionData.reduce(
-    (allConsumption, consumption) => {
-      const { connection } = consumption;
-
-      // if this connection isn't in our list yet, add it
-      // also, generate an array for the usage array
-      if (!allConsumption[connection.connectionId]) {
-        allConsumption[connection.connectionId] = {
-          connection,
-          totalFreeUsage: consumption.freeUsage,
-          totalBilledCost: consumption.billedCost,
-          // Hard-coded to 0 because there is no internal usage in the old billing page, only in the new workspace usage page
-          totalInternalUsage: 0,
-          totalUsage: consumption.freeUsage + consumption.billedCost,
-          usage: generateArrayForTimeWindow(timeWindow),
-        };
-      } else {
-        allConsumption[connection.connectionId].totalFreeUsage += consumption.freeUsage;
-        allConsumption[connection.connectionId].totalBilledCost += consumption.billedCost;
-        allConsumption[connection.connectionId].totalUsage += consumption.freeUsage + consumption.billedCost;
-      }
-
-      mergeUsageData(allConsumption[connection.connectionId].usage, consumption);
-
-      return allConsumption;
-    },
-    {} as Record
-  );
-
-  const array = Object.values(usagePerConnection);
-  return array;
-};
-
-// currently assumes a default time window of 30 days and no other conditions (yet)
-export const calculateFreeAndPaidUsageByTimeChunk = (
-  filteredConsumptionData: ConsumptionRead[],
-  timeWindow: ConsumptionTimeWindow
-) => {
-  if (filteredConsumptionData.length === 0) {
-    return [];
-  }
-
-  const usagePerTimeChunk = generateArrayForTimeWindow(timeWindow);
-
-  filteredConsumptionData.forEach((consumption) => {
-    mergeUsageData(usagePerTimeChunk, consumption);
-  });
-  return usagePerTimeChunk;
-};
-
 // Used for the workspace usage page
 export function getWorkspaceUsageByConnection(
filteredWorkspaceUsage: ConnectionUsageRead[], @@ -108,6 +57,8 @@ export function getWorkspaceUsageByConnection( connection: { connectionId: usage.connection.connectionId, connectionName: usage.connection.name, + connectionScheduleType: usage.connection.scheduleType, + connectionSchedule: usage.connection.schedule, status: usage.connection.status, sourceId: usage.source.sourceId, sourceConnectionName: usage.source.name, diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/index.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/index.tsx index 9cf00d5e65e..df9ef369e20 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/index.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspaceUsagePage/index.tsx @@ -1 +1 @@ -export * from "./WorkspaceUsagePage"; +export { WorkspaceUsagePage as default } from "./WorkspaceUsagePage"; diff --git a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx index 0ab508f1403..455a66a27cc 100644 --- a/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx +++ b/airbyte-webapp/src/packages/cloud/views/workspaces/WorkspacesPage/CloudWorkspacesPage.test.tsx @@ -33,8 +33,6 @@ jest.mock("core/api", () => ({ email: `${id}@example.com`, organizationId: id, organizationName: `Org ${id}`, - orgLevelBilling: false, - pba: false, })), })); diff --git a/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.module.scss b/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.module.scss index 8bfe4ba6c4b..ef64e6438c5 100644 --- a/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.module.scss +++ b/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.module.scss @@ -7,7 +7,7 @@ .pageBody { grid-area: page-content; - overflow-y: auto; + padding-top: 0; } .alignSelfStart { diff --git a/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.tsx b/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.tsx index 4986aabab6d..077562b1959 100644 --- a/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.tsx +++ b/airbyte-webapp/src/pages/connections/AllConnectionsPage/AllConnectionsPage.tsx @@ -5,7 +5,6 @@ import { useNavigate } from "react-router-dom"; import { LoadingPage } from "components"; import { ConnectionOnboarding } from "components/connection/ConnectionOnboarding"; import { HeadTitle } from "components/HeadTitle"; -import { Box } from "components/ui/Box"; import { Button } from "components/ui/Button"; import { FlexContainer, FlexItem } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; @@ -72,9 +71,7 @@ export const AllConnectionsPage: React.FC = () => { } /> - - - + ) : ( diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/AddStreamForMappingComboBox.module.scss b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/AddStreamForMappingComboBox.module.scss new file mode 100644 index 00000000000..0232d81abf7 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/AddStreamForMappingComboBox.module.scss @@ -0,0 +1,89 @@ +@use "scss/variables"; +@use "scss/colors"; + +.addStreamForMappingComboBox { + min-width: variables.$width-extra-wide-menu; + border-radius: 
variables.$border-radius-md; + + &--secondary { + border: variables.$border-thin solid colors.$grey-100; + width: 150px; + + input { + color: colors.$grey-400; + + &:hover { + color: colors.$grey-400; + } + + &::placeholder { + color: colors.$grey-300; + } + } + + &:not(:focus-within) { + background-color: colors.$white; + color: colors.$grey-400; + + input { + cursor: pointer; + } + + button svg { + color: colors.$white; + } + } + + &:not(.disabled):hover { + border-color: colors.$grey-400; + color: colors.$grey-600; + } + + input:not(:focus-within) { + font-weight: 600; + color: colors.$grey-400; + + &::placeholder { + color: colors.$grey-400; + text-align: center; + } + } + + &.disabled { + input:not(:focus-within) { + font-weight: 600; + color: colors.$grey-100; + + &::placeholder { + color: colors.$grey-100; + text-align: center; + } + } + } + } + + &:not(&--secondary) { + &:not(:focus-within) { + background-color: colors.$blue; + color: colors.$white; + + input { + cursor: pointer; + } + } + + input:not(:focus-within) { + color: colors.$white; + + &::placeholder { + color: colors.$white; + text-align: center; + } + } + } + + .disabled { + background-color: colors.$grey-300; + color: colors.$white; + } +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/AddStreamForMappingComboBox.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/AddStreamForMappingComboBox.tsx new file mode 100644 index 00000000000..2d7ce64dbad --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/AddStreamForMappingComboBox.tsx @@ -0,0 +1,65 @@ +import classNames from "classnames"; +import { useState } from "react"; +import { FormattedMessage, useIntl } from "react-intl"; + +import { Button } from "components/ui/Button"; +import { ComboBox } from "components/ui/ComboBox"; +import { Tooltip } from "components/ui/Tooltip"; + +import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; + +import styles from "./AddStreamForMappingComboBox.module.scss"; +import { useMappingContext } from "./MappingContext"; +import { useGetStreamsForNewMapping } from "./useGetStreamsForNewMappings"; + +export const AddStreamForMappingComboBox: React.FC<{ secondary?: boolean }> = ({ secondary = false }) => { + const { mode } = useConnectionFormService(); + const [selectedStream, setSelectedStream] = useState(undefined); + const streamsToList = useGetStreamsForNewMapping(); + const { addStreamToMappingsList } = useMappingContext(); + const { formatMessage } = useIntl(); + + const placeholder = secondary + ? formatMessage({ id: "connections.mappings.addStream" }) + : formatMessage({ id: "connections.mappings.selectAStream" }); + + const onChange = (streamName: string) => { + setSelectedStream(streamName); + addStreamToMappingsList(streamName); + }; + + const options = streamsToList?.map((stream) => ({ + label: stream.stream?.name || "", + value: stream.stream?.name || "", + })); + const disabled = !options || options.length === 0 || mode === "readonly"; + + return ( + <> + {!disabled ? 
( + + ) : ( + + {placeholder} + + } + > + + + )} + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/ConnectionMappingsList.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/ConnectionMappingsList.tsx new file mode 100644 index 00000000000..8a65967b1a4 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/ConnectionMappingsList.tsx @@ -0,0 +1,46 @@ +import { FormattedMessage } from "react-intl"; + +import { Button } from "components/ui/Button"; +import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; + +import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; + +import { AddStreamForMappingComboBox } from "./AddStreamForMappingComboBox"; +import { useMappingContext } from "./MappingContext"; +import { StreamMappingsCard } from "./StreamMappingsCard"; + +export const ConnectionMappingsList: React.FC = () => { + const { streamsWithMappings, clear, submitMappings } = useMappingContext(); + const { mode } = useConnectionFormService(); + + return ( + + + + + + + + + + + + {Object.entries(streamsWithMappings).map(([streamName, mappers]) => { + if (!mappers || mappers.length === 0) { + return null; + } + + return ; + })} +
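Note: ConnectionMappingsList above renders one card per entry of streamsWithMappings and skips streams whose mapper array is empty. A small sketch of the shape this implies; the Record type and the behaviour of addStreamToMappingsList are assumptions about the surrounding context, not its actual implementation:

import { MapperConfiguration, StreamMapperType } from "core/api/types/AirbyteClient";

type StreamsWithMappings = Record<string, Array<{ type: StreamMapperType; mapperConfiguration: MapperConfiguration }>>;

// Assumed behaviour: picking a stream in AddStreamForMappingComboBox registers it so a first mapping can be
// configured (the real implementation may seed an initial mapping so the new stream shows up in the list).
const addStreamSketch = (streams: StreamsWithMappings, streamName: string): StreamsWithMappings =>
  streamName in streams ? streams : { ...streams, [streamName]: [] };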
+ ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/ConnectionMappingsPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/ConnectionMappingsPage.tsx new file mode 100644 index 00000000000..dae4e677b7e --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/ConnectionMappingsPage.tsx @@ -0,0 +1,35 @@ +import { PageContainer } from "components/PageContainer"; +import { ScrollParent } from "components/ui/ScrollParent"; + +import { FeatureItem, IfFeatureDisabled, IfFeatureEnabled } from "core/services/features"; + +import { ConnectionMappingsList } from "./ConnectionMappingsList"; +import { MappingContextProvider, useMappingContext } from "./MappingContext"; +import { MappingsEmptyState } from "./MappingsEmptyState"; +import { MappingsUpsellEmptyState } from "./MappingsUpsellEmptyState"; + +export const ConnectionMappingsPage = () => { + return ( + + + + + + + + ); +}; + +const ConnectionMappingsPageContent = () => { + const { streamsWithMappings } = useMappingContext(); + return ( + <> + + {Object.entries(streamsWithMappings).length > 0 ? : } + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/EncryptionRow.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/EncryptionRow.tsx new file mode 100644 index 00000000000..090e8bc6be5 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/EncryptionRow.tsx @@ -0,0 +1,284 @@ +import React, { useState } from "react"; +import { FormProvider, useForm } from "react-hook-form"; +import { FormattedMessage, useIntl } from "react-intl"; +import { v4 as uuidv4 } from "uuid"; +import * as yup from "yup"; + +import { FlexContainer } from "components/ui/Flex"; +import { Input } from "components/ui/Input"; +import { ListBox } from "components/ui/ListBox"; +import { Text } from "components/ui/Text"; + +import { StreamMapperType, MapperConfiguration } from "core/api/types/AirbyteClient"; + +import { autoSubmitResolver } from "./autoSubmitResolver"; +import { useMappingContext } from "./MappingContext"; +import styles from "./MappingRow.module.scss"; +import { MappingTypeListBox } from "./MappingTypeListBox"; +import { SelectFieldOption, SelectTargetField } from "./SelectTargetField"; +import { useGetFieldsInStream } from "./useGetFieldsInStream"; + +export enum Algorithm { + RSA = "RSA", + AES = "AES", +} +export type AESMode = "CBC" | "CFB" | "OFB" | "CTR" | "GCM" | "ECB"; +export type AESPadding = "PKCS5Padding" | "NoPadding"; +export const aesModes: AESMode[] = ["CBC", "CFB", "OFB", "CTR", "GCM", "ECB"]; +export const aesPaddings: AESPadding[] = ["PKCS5Padding", "NoPadding"]; + +export interface EncryptionMapperFormValues { + type: StreamMapperType; + mapperConfiguration: { + id: string; + algorithm: Algorithm; + targetField: string; + fieldNameSuffix: string; + key?: string; + mode?: AESMode; + padding?: AESPadding; + publicKey?: string; + }; +} + +const rsaEncryptionMappingSchema = yup.object().shape({ + id: yup.string().required("id required"), + algorithm: yup.mixed().oneOf([Algorithm.RSA]).required("Algorithm is required"), + targetField: yup.string().required("Target field is required"), + publicKey: yup.string().required("Public key is required"), + fieldNameSuffix: yup.string().oneOf(["_encrypted"]).required("Field name suffix is required"), +}); + +const aesEncryptionMappingSchema = yup.object().shape({ + id: yup.string().required("id required"), + algorithm: 
yup.mixed().oneOf([Algorithm.AES]).required("Algorithm is required"), + targetField: yup.string().required("Target field is required"), + key: yup.string().required("Key is required"), + fieldNameSuffix: yup.string().oneOf(["_encrypted"]).required("Field name suffix is required"), + mode: yup.mixed().oneOf(aesModes).required("Mode is required"), + padding: yup.mixed().oneOf(aesPaddings).required("Padding is required"), +}); + +const encryptionMapperConfigSchema = yup.lazy((value) => { + switch (value.algorithm) { + case Algorithm.AES: + return aesEncryptionMappingSchema.required("AES configuration is required"); + case Algorithm.RSA: + return rsaEncryptionMappingSchema.required("RSA configuration is required"); + default: + return yup.mixed().notRequired(); + } +}); + +export const encryptionMapperSchema = yup.object().shape({ + type: yup.mixed().oneOf(["encryption"]).required(), + mapperConfiguration: encryptionMapperConfigSchema, +}); + +const aesFormSchema = yup.object().shape({ + type: yup.mixed().oneOf([StreamMapperType.encryption]).required(), + mapperConfiguration: yup + .object() + .shape({ + id: yup.string().required("id required"), + algorithm: yup.mixed().oneOf([Algorithm.AES]).required(), + targetField: yup.string().required("Target field is required"), + key: yup.string().required("Key is required"), + fieldNameSuffix: yup.string().oneOf(["_encrypted"]).required("Field name suffix is required"), + mode: yup.mixed().oneOf(aesModes).required("Mode is required"), + padding: yup.mixed().oneOf(aesPaddings).required("Padding is required"), + }) + .required(), +}); + +interface AESFormProps { + streamName: string; + mapping: MapperConfiguration; + setAlgorithm: (algorithm: Algorithm) => void; + targetFieldOptions: SelectFieldOption[]; +} + +export const AESForm: React.FC = ({ streamName, mapping, setAlgorithm, targetFieldOptions }) => { + const { updateLocalMapping, validateMappings } = useMappingContext(); + const { formatMessage } = useIntl(); + + const methods = useForm({ + defaultValues: { + type: StreamMapperType.encryption, + mapperConfiguration: { + id: mapping.mapperConfiguration.id ?? uuidv4(), + algorithm: Algorithm.AES, + targetField: mapping.mapperConfiguration.targetField ?? "", + key: mapping.mapperConfiguration.key ?? "", + fieldNameSuffix: mapping.mapperConfiguration.fieldNameSuffix ?? "_encrypted", + mode: mapping.mapperConfiguration.mode ?? "CBC", + padding: mapping.mapperConfiguration.padding ?? "PKCS5Padding", + }, + }, + resolver: autoSubmitResolver(aesFormSchema, (data) => { + updateLocalMapping(streamName, data); + validateMappings(); + }), + mode: "onBlur", + }); + + const values = methods.watch(); + + return ( + +
+ + targetFieldOptions={targetFieldOptions} + name="mapperConfiguration.targetField" + /> + + + + setAlgorithm(selectedAlgorithm)} + options={[ + { label: "AES", value: Algorithm.AES }, + { label: "RSA", value: Algorithm.RSA }, + ]} + /> + + + + + methods.setValue("mapperConfiguration.mode", mode, { shouldValidate: true })} + options={aesModes.map((mode) => ({ label: mode, value: mode }))} + /> + + methods.setValue("mapperConfiguration.padding", padding, { shouldValidate: true }) + } + options={aesPaddings.map((padding) => ({ label: padding, value: padding }))} + /> + +
+ ); +}; + +const rsaFormSchema: yup.SchemaOf = yup.object().shape({ + type: yup.mixed().oneOf([StreamMapperType.encryption]).required(), + mapperConfiguration: yup + .object() + .shape({ + id: yup.string().required("id required"), + algorithm: yup.mixed().oneOf([Algorithm.RSA]).required(), + targetField: yup.string().required("Target field is required"), + publicKey: yup.string().required("Public key is required"), + fieldNameSuffix: yup.string().oneOf(["_encrypted"]).required("Field name suffix is required"), + }) + .required(), +}); + +interface RSAFormProps { + streamName: string; + mapping: MapperConfiguration; + setAlgorithm: (algorithm: Algorithm) => void; + targetFieldOptions: SelectFieldOption[]; +} + +export const RSAForm: React.FC = ({ streamName, mapping, setAlgorithm, targetFieldOptions }) => { + const { formatMessage } = useIntl(); + const { updateLocalMapping, validateMappings } = useMappingContext(); + + const methods = useForm({ + defaultValues: { + type: StreamMapperType.encryption, + mapperConfiguration: { + id: mapping.mapperConfiguration.id ?? uuidv4(), + algorithm: Algorithm.RSA, + targetField: mapping.mapperConfiguration.targetField ?? "", + publicKey: mapping.mapperConfiguration.publicKey ?? "", + fieldNameSuffix: mapping.mapperConfiguration.fieldNameSuffix ?? "_encrypted", + }, + }, + resolver: autoSubmitResolver(rsaFormSchema, (data) => { + updateLocalMapping(streamName, data); + validateMappings(); + }), + mode: "onBlur", + }); + + return ( + +
+ + targetFieldOptions={targetFieldOptions} + name="mapperConfiguration.targetField" + /> + + + + setAlgorithm(selectedAlgorithm)} + options={[ + { label: "AES", value: Algorithm.AES }, + { label: "RSA", value: Algorithm.RSA }, + ]} + /> + + + + + +
+ ); +}; + +export const EncryptionRow: React.FC<{ + mappingId: string; + streamName: string; +}> = ({ mappingId, streamName }) => { + const { streamsWithMappings } = useMappingContext(); + const mapping = streamsWithMappings[streamName].find((m) => m.mapperConfiguration.id === mappingId); + const fieldsInStream = useGetFieldsInStream(streamName); + + const [algorithm, setAlgorithm] = useState(mapping?.mapperConfiguration.algorithm || Algorithm.RSA); + + if (!mapping) { + return null; + } + + return ( + + + + {algorithm === Algorithm.AES && ( + + )} + {algorithm === Algorithm.RSA && ( + + )} + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/FieldRenamingRow.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/FieldRenamingRow.tsx new file mode 100644 index 00000000000..1623f2cb524 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/FieldRenamingRow.tsx @@ -0,0 +1,108 @@ +import React, { useMemo } from "react"; +import { FormProvider, get, useForm, useFormContext, useFormState } from "react-hook-form"; +import { FormattedMessage, useIntl } from "react-intl"; +import { v4 as uuidv4 } from "uuid"; +import * as yup from "yup"; + +import { FormControlFooterError } from "components/forms/FormControl"; +import { FlexContainer } from "components/ui/Flex"; +import { Input } from "components/ui/Input"; +import { Text } from "components/ui/Text"; + +import { StreamMapperType } from "core/api/types/AirbyteClient"; + +import { autoSubmitResolver } from "./autoSubmitResolver"; +import { useMappingContext } from "./MappingContext"; +import styles from "./MappingRow.module.scss"; +import { MappingTypeListBox } from "./MappingTypeListBox"; +import { SelectTargetField } from "./SelectTargetField"; +import { useGetFieldsInStream } from "./useGetFieldsInStream"; + +export const fieldRenamingConfigSchema = yup.object().shape({ + id: yup.string().required("id required"), + newFieldName: yup.string().required("New field name is required"), + originalFieldName: yup.string().required("Old field name is required"), +}); + +export const fieldRenamingMapperSchema: yup.SchemaOf = yup.object().shape({ + type: yup.mixed().oneOf(["field-renaming"]).required(), + mapperConfiguration: fieldRenamingConfigSchema.required(), +}); + +interface FieldRenamingRowProps { + mappingId: number; + streamName: string; +} +interface FieldRenamingMapperFormValues { + type: StreamMapperType; + mapperConfiguration: { + id: string; + originalFieldName: string; + newFieldName: string; + }; +} + +export const FieldRenamingRow: React.FC = ({ mappingId, streamName }) => { + const { updateLocalMapping, streamsWithMappings, validateMappings } = useMappingContext(); + const mapping = streamsWithMappings[streamName].find((m) => m.mapperConfiguration.id === mappingId); + const fieldsInStream = useGetFieldsInStream(streamName); + + const defaultValues = useMemo(() => { + return { + type: StreamMapperType["field-renaming"], + mapperConfiguration: { + id: mapping?.mapperConfiguration.id ?? uuidv4(), + originalFieldName: mapping?.mapperConfiguration?.originalFieldName ?? "", + newFieldName: mapping?.mapperConfiguration?.newFieldName ?? "", + }, + }; + }, [mapping]); + + const methods = useForm({ + defaultValues, + resolver: autoSubmitResolver(fieldRenamingMapperSchema, (data) => { + updateLocalMapping(streamName, data); + validateMappings(); + }), + mode: "onBlur", + }); + + return ( + +
+ + + + targetFieldOptions={fieldsInStream} + name="mapperConfiguration.originalFieldName" + /> + + + + + +
+
+  );
+};
+
+const NewFieldNameInput = () => {
+  const { formatMessage } = useIntl();
+  const { register } = useFormContext();
+  const { errors } = useFormState({ name: "mapperConfiguration.newFieldName" });
+  const error = get(errors, "mapperConfiguration.newFieldName");
+  return (
+ + {error && {error.message}} +
+ ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/HashFieldRow.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/HashFieldRow.tsx new file mode 100644 index 00000000000..b808f58fcda --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/HashFieldRow.tsx @@ -0,0 +1,158 @@ +import { useMemo } from "react"; +import { Controller, FormProvider, useForm, useFormContext } from "react-hook-form"; +import { FormattedMessage } from "react-intl"; +import { v4 as uuidv4 } from "uuid"; +import * as yup from "yup"; + +import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { ListBox, ListBoxControlButtonProps } from "components/ui/ListBox"; +import { Text } from "components/ui/Text"; + +import { MapperConfiguration, StreamMapperType } from "core/api/types/AirbyteClient"; + +import { autoSubmitResolver } from "./autoSubmitResolver"; +import { useMappingContext } from "./MappingContext"; +import styles from "./MappingRow.module.scss"; +import { MappingTypeListBox } from "./MappingTypeListBox"; +import { SelectTargetField } from "./SelectTargetField"; +import { useGetFieldsInStream } from "./useGetFieldsInStream"; + +export enum HashingMethods { + MD2 = "MD2", + MD5 = "MD5", + SHA1 = "SHA-1", + SHA224 = "SHA-224", + SHA256 = "SHA-256", + SHA384 = "SHA-384", + SHA512 = "SHA-512", +} + +export interface HashingMapperFormValues { + type: StreamMapperType; + mapperConfiguration: { + targetField: string; + method: HashingMethods; + fieldNameSuffix: string; + }; +} + +export interface HashingMapperRowProps { + type: StreamMapperType; + mapperConfiguration: MapperConfiguration; +} + +const hashingMapperConfigSchema = yup.object().shape({ + targetField: yup.string().required("Target field is required"), + method: yup.mixed().oneOf(Object.values(HashingMethods)).required("Hashing method is required"), + fieldNameSuffix: yup.string().required("Field name suffix is required"), +}); + +export const hashingMapperSchema = yup.object().shape({ + type: yup.mixed().oneOf(["hashing"]).required(), + mapperConfiguration: hashingMapperConfigSchema, +}); + +export const HashFieldRow: React.FC<{ + mappingId: string; + streamName: string; +}> = ({ mappingId, streamName }) => { + const { updateLocalMapping, streamsWithMappings, validateMappings } = useMappingContext(); + const mapping = streamsWithMappings[streamName].find((m) => m.mapperConfiguration.id === mappingId); + const fieldsInStream = useGetFieldsInStream(streamName); + + const defaultValues = useMemo(() => { + return { + type: StreamMapperType.hashing, + mapperConfiguration: { + id: mapping?.mapperConfiguration.id ?? uuidv4(), + targetField: mapping?.mapperConfiguration.targetField ?? "", + method: mapping?.mapperConfiguration.method ?? HashingMethods.MD5, + fieldNameSuffix: mapping?.mapperConfiguration.fieldNameSuffix ?? "_hashed", + }, + }; + }, [mapping]); + + const methods = useForm({ + defaultValues, + resolver: autoSubmitResolver(hashingMapperSchema, (data) => { + updateLocalMapping(streamName, data); + validateMappings(); + }), + mode: "onBlur", + }); + + if (!mapping) { + return null; + } + + return ( + +
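The hashing mapper above is validated with plain yup schemas and is seeded with MD5 plus a "_hashed" suffix. A self-contained sketch of how that schema behaves, re-declared here only so the example runs on its own (the real schema lives in HashFieldRow.tsx):

import * as yup from "yup";

// Re-declaration for illustration; mirrors hashingMapperConfigSchema in HashFieldRow.tsx.
const hashingConfigSchema = yup.object().shape({
  targetField: yup.string().required("Target field is required"),
  method: yup
    .mixed()
    .oneOf(["MD2", "MD5", "SHA-1", "SHA-224", "SHA-256", "SHA-384", "SHA-512"])
    .required("Hashing method is required"),
  fieldNameSuffix: yup.string().required("Field name suffix is required"),
});

// A configuration like the one the form seeds by default passes validation...
hashingConfigSchema.validateSync({ targetField: "email", method: "MD5", fieldNameSuffix: "_hashed" });

// ...while a missing target field keeps the auto-submit resolver from persisting anything.
console.log(hashingConfigSchema.isValidSync({ targetField: "", method: "MD5", fieldNameSuffix: "_hashed" })); // false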
+ + + + name="mapperConfiguration.targetField" + targetFieldOptions={fieldsInStream} + /> + + + + + +
+
+ ); +}; + +const SelectHashingMethodControlButton: React.FC> = ({ selectedOption }) => { + if (!selectedOption) { + return ( + + + + ); + } + + return ( + + {selectedOption.label} + + + ); +}; + +const supportedHashTypes = [ + { label: "MD5", value: HashingMethods.MD5 }, + { label: "SHA-256", value: HashingMethods.SHA256 }, + { label: "SHA-512", value: HashingMethods.SHA512 }, +]; + +const SelectHashingMethod = () => { + const { control } = useFormContext(); + + return ( + ( + { + field.onChange(value); + // We're using onBlur mode, so we need to manually trigger the validation + field.onBlur(); + }} + selectedValue={field.value} + options={supportedHashTypes} + /> + )} + /> + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingContext.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingContext.tsx new file mode 100644 index 00000000000..9c93594e0ea --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingContext.tsx @@ -0,0 +1,127 @@ +import React, { PropsWithChildren, createContext, useContext, useState } from "react"; +import { v4 as uuidv4 } from "uuid"; + +import { useCurrentConnection } from "core/api"; +import { ConfiguredStreamMapper, StreamMapperType } from "core/api/types/AirbyteClient"; + +import { useGetMappingsForCurrentConnection } from "./useGetMappingsForCurrentConnection"; +import { useUpdateMappingsForCurrentConnection } from "./useUpdateMappingsForCurrentConnection"; + +interface MappingContextType { + streamsWithMappings: Record; + updateLocalMapping: (streamName: string, updatedMapping: ConfiguredStreamMapper) => void; + reorderMappings: (streamName: string, newOrder: ConfiguredStreamMapper[]) => void; + clear: () => void; + submitMappings: () => Promise; + removeMapping: (streamName: string, mappingId: string) => void; + addStreamToMappingsList: (streamName: string) => void; + addMappingForStream: (streamName: string) => void; + validateMappings: () => void; +} + +const MappingContext = createContext(undefined); + +export const MappingContextProvider: React.FC = ({ children }) => { + const connection = useCurrentConnection(); + const savedStreamsWithMappings = useGetMappingsForCurrentConnection(); + const { updateMappings } = useUpdateMappingsForCurrentConnection(connection.connectionId); + const [streamsWithMappings, setStreamsWithMappings] = useState(savedStreamsWithMappings); + + const validateMappings = () => { + console.log(`validateMappings`, streamsWithMappings); + // TOOD: actually validate mappings via the API :-) + }; + + // Updates a specific mapping in the local state + const updateLocalMapping = (streamName: string, updatedMapping: ConfiguredStreamMapper) => { + console.log(`updating local mapping for stream ${streamName}`, updatedMapping); + setStreamsWithMappings((prevMappings) => ({ + ...prevMappings, + [streamName]: prevMappings[streamName].map((mapping) => + mapping.mapperConfiguration.id === updatedMapping.mapperConfiguration.id ? 
updatedMapping : mapping + ), + })); + }; + + const addMappingForStream = (streamName: string) => { + setStreamsWithMappings((prevMappings) => ({ + ...prevMappings, + [streamName]: [ + ...prevMappings[streamName], + { type: StreamMapperType.hashing, mapperConfiguration: { id: uuidv4() } }, + ], + })); + }; + + // Reorders the mappings for a specific stream + const reorderMappings = (streamName: string, newOrder: ConfiguredStreamMapper[]) => { + setStreamsWithMappings((prevMappings) => ({ + ...prevMappings, + [streamName]: newOrder, + })); + }; + + // Clears the mappings back to the saved state + const clear = () => { + setStreamsWithMappings(savedStreamsWithMappings); + }; + + const removeMapping = (streamName: string, mappingId: string) => { + const mappingsForStream = streamsWithMappings[streamName].filter( + (mapping) => mapping.mapperConfiguration.id !== mappingId + ); + + setStreamsWithMappings((prevMappings) => { + if (mappingsForStream.length === 0) { + const { [streamName]: removedStream, ...rest } = prevMappings; + return rest; + } + return { + ...prevMappings, + [streamName]: mappingsForStream, + }; + }); + }; + + // Submits the current mappings state to the backend + const submitMappings = async () => { + await updateMappings(streamsWithMappings); + return Promise.resolve(); + }; + + const addStreamToMappingsList = (streamName: string) => { + const newMapping: Record = { + [streamName]: [{ type: StreamMapperType.hashing, mapperConfiguration: { id: uuidv4() } }], + }; + setStreamsWithMappings((prevMappings) => ({ + ...prevMappings, + ...newMapping, + })); + }; + + return ( + + {children} + + ); +}; + +export const useMappingContext = () => { + const context = useContext(MappingContext); + if (!context) { + throw new Error("useMappingContext must be used within a MappingContextProvider"); + } + return context; +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingRow.module.scss b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingRow.module.scss new file mode 100644 index 00000000000..1b631b4f8f8 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingRow.module.scss @@ -0,0 +1,51 @@ +@use "scss/variables"; +@use "scss/colors"; + +.row { + display: flex; + flex-shrink: 0; + align-items: center; + justify-content: space-between; + background-color: colors.$foreground; + border-radius: variables.$border-radius-xs; + border: variables.$border-thin solid colors.$grey-300; + padding: variables.$spacing-md; + min-width: variables.$width-modal-md; + + input { + font-size: variables.$font-size-md; + + ::placeholder { + font-size: variables.$font-size-md; + } + } +} + +.input { + width: variables.$width-extra-wide-menu; +} + +.controlButton { + padding: 7px 8px; + line-height: 1.2; + white-space: nowrap; +} + +.rowContent { + flex: 1; + display: flex; + flex-wrap: wrap; + align-items: center; + justify-content: flex-start; +} + +.form { + display: flex; + gap: variables.$spacing-md; + align-items: center; + flex-wrap: wrap; + + p { + white-space: nowrap; + } +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingRow.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingRow.tsx new file mode 100644 index 00000000000..c34fe7db2ff --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingRow.tsx @@ -0,0 +1,81 @@ +import { useSortable } from "@dnd-kit/sortable"; +import { CSS } from "@dnd-kit/utilities"; +import { useMemo } from "react"; + 
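MappingContext.tsx keys all local state by stream name, so consumers only ever deal with a Record of stream name to mapper list. A sketch of the intended call pattern from a consumer; the hook and its methods are the ones defined above, while the toolbar component itself is hypothetical:

import React from "react";

import { useMappingContext } from "./MappingContext";

// Hypothetical toolbar: add a mapping to a stream, remove one by its client-side id,
// discard local edits, or persist everything through the context in one call.
export const MappingActions: React.FC<{ streamName: string; mappingId: string }> = ({ streamName, mappingId }) => {
  const { addMappingForStream, removeMapping, clear, submitMappings } = useMappingContext();

  return (
    <div>
      <button onClick={() => addMappingForStream(streamName)}>Add mapping</button>
      <button onClick={() => removeMapping(streamName, mappingId)}>Remove mapping</button>
      <button onClick={clear}>Discard changes</button>
      <button onClick={() => submitMappings()}>Save</button>
    </div>
  );
};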
+import { Button } from "components/ui/Button"; +import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; + +import { StreamMapperType } from "core/api/types/AirbyteClient"; + +import { EncryptionRow } from "./EncryptionRow"; +import { FieldRenamingRow } from "./FieldRenamingRow"; +import { HashFieldRow } from "./HashFieldRow"; +import { useMappingContext } from "./MappingContext"; +import styles from "./MappingRow.module.scss"; +import { RowFilteringRow } from "./RowFilterRow"; + +export const SupportedMappingTypes = [ + StreamMapperType.hashing, + StreamMapperType["field-renaming"], + StreamMapperType["row-filtering"], + StreamMapperType.encryption, +] as const; + +export const MappingRow: React.FC<{ + streamName: string; + id: string; +}> = ({ streamName, id }) => { + const { removeMapping, streamsWithMappings } = useMappingContext(); + const mapping = streamsWithMappings[streamName].find((m) => m.mapperConfiguration.id === id); + + const { attributes, listeners, setNodeRef, transform, transition, isDragging } = useSortable({ id }); + + const style = { + transform: CSS.Transform.toString(transform ? { ...transform, x: 0 } : null), + transition, + zIndex: isDragging ? 1 : undefined, + }; + + const RowContent = useMemo(() => { + if (mapping?.type === StreamMapperType.hashing) { + return ; + } + if (mapping?.type === StreamMapperType["field-renaming"]) { + return ; + } + if (mapping?.type === StreamMapperType["row-filtering"]) { + return ; + } + if (mapping?.type === StreamMapperType.encryption) { + return ; + } + + return null; + }, [mapping, streamName]); + + if (!RowContent || !mapping) { + return null; + } + + return ( +
+ + + + {RowContent} + + + +
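MappingRow registers itself as a sortable item via useSortable and applies the transform it is handed back; StreamMappingsCard (later in this diff) provides the matching DndContext/SortableContext and reorders on drag end. A self-contained sketch of that pairing using plain strings as items; everything here is standard @dnd-kit API, and the item values are made up:

import React, { useState } from "react";
import { DndContext, DragEndEvent, closestCenter } from "@dnd-kit/core";
import { SortableContext, arrayMove, useSortable, verticalListSortingStrategy } from "@dnd-kit/sortable";
import { CSS } from "@dnd-kit/utilities";

// One draggable row: useSortable supplies the ref, drag listeners, and a transform to apply.
const SortableItem: React.FC<{ id: string }> = ({ id }) => {
  const { attributes, listeners, setNodeRef, transform, transition } = useSortable({ id });
  const style = { transform: CSS.Transform.toString(transform), transition };
  return (
    <li ref={setNodeRef} style={style} {...attributes} {...listeners}>
      {id}
    </li>
  );
};

// The list owns the order and applies arrayMove when a drag finishes,
// mirroring handleDragEnd in StreamMappingsCard.
export const SortableMappingsDemo: React.FC = () => {
  const [items, setItems] = useState(["hashing", "field-renaming", "row-filtering"]);

  const handleDragEnd = ({ active, over }: DragEndEvent) => {
    if (over && active.id !== over.id) {
      setItems((current) => {
        const oldIndex = current.indexOf(String(active.id));
        const newIndex = current.indexOf(String(over.id));
        return arrayMove(current, oldIndex, newIndex);
      });
    }
  };

  return (
    <DndContext collisionDetection={closestCenter} onDragEnd={handleDragEnd}>
      <SortableContext items={items} strategy={verticalListSortingStrategy}>
        <ul>
          {items.map((id) => (
            <SortableItem key={id} id={id} />
          ))}
        </ul>
      </SortableContext>
    </DndContext>
  );
};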
+ ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingTypeListBox.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingTypeListBox.tsx new file mode 100644 index 00000000000..1a658e02480 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingTypeListBox.tsx @@ -0,0 +1,93 @@ +import { FormattedMessage } from "react-intl"; + +import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { ListBox, ListBoxControlButtonProps } from "components/ui/ListBox"; +import { Text } from "components/ui/Text"; + +import { StreamMapperType } from "core/api/types/AirbyteClient"; + +import { useMappingContext } from "./MappingContext"; +import { SupportedMappingTypes } from "./MappingRow"; +import styles from "./MappingRow.module.scss"; +import { OperationType } from "./RowFilterRow"; + +interface MappingTypeListBoxProps { + selectedValue: StreamMapperType; + streamName: string; + mappingId: string; +} + +export const MappingTypeListBox: React.FC = ({ selectedValue, streamName, mappingId }) => { + const { updateLocalMapping } = useMappingContext(); + + const mappingTypeLabels = { + hashing: { title: "connections.mappings.type.hash", description: "connections.mappings.type.hash.description" }, + "field-renaming": { + title: "connections.mappings.type.fieldRenaming", + description: "connections.mappings.type.fieldRenaming.description", + }, + "row-filtering": { + title: "connections.mappings.type.rowFiltering", + description: "connections.mappings.type.rowFiltering.description", + }, + encryption: { + title: "connections.mappings.type.encryption", + description: "connections.mappings.type.encryption.description", + }, + }; + + const supportedMappingsOptions = Object.values(SupportedMappingTypes).map((type) => ({ + label: ( + + + + + + + + + ), + value: type, + })); + + const ControlButton: React.FC> = ({ selectedOption }) => { + if (!selectedOption) { + return ( + + + + ); + } + + return ( + + + + + + + ); + }; + + return ( + { + if (value !== selectedValue) { + if (value === StreamMapperType["row-filtering"]) { + updateLocalMapping(streamName, { + type: value, + mapperConfiguration: { id: mappingId, conditions: { type: OperationType.equal } }, + }); + } else { + updateLocalMapping(streamName, { type: value, mapperConfiguration: { id: mappingId } }); + } + } + }} + /> + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingsEmptyState.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingsEmptyState.tsx new file mode 100644 index 00000000000..ff613b2630c --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingsEmptyState.tsx @@ -0,0 +1,24 @@ +import { FormattedMessage } from "react-intl"; + +import { EmptyState } from "components/EmptyState"; +import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; + +import { AddStreamForMappingComboBox } from "./AddStreamForMappingComboBox"; + +export const MappingsEmptyState: React.FC = () => { + return ( + + + + + + + } /> + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingsUpsellEmptyState.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingsUpsellEmptyState.tsx new file mode 100644 index 00000000000..9d529133484 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/MappingsUpsellEmptyState.tsx @@ -0,0 +1,53 @@ +import { 
FormattedMessage } from "react-intl"; + +import { Box } from "components/ui/Box"; +import { HighlightCard } from "components/ui/Card/HighlightCard"; +import { FlexContainer } from "components/ui/Flex"; +import { Heading } from "components/ui/Heading"; +import { Icon } from "components/ui/Icon"; +import { ExternalLink } from "components/ui/Link"; +import { Text } from "components/ui/Text"; + +import { FeatureItem, useFeature } from "core/services/features"; +import { links } from "core/utils/links"; +import { BrandingBadge } from "views/layout/SideBar/AirbyteHomeLink"; + +import mappings_screenshot from "./mappings_screenshot.png"; +export const MappingsUpsellEmptyState = () => { + const enterpriseUpsell = useFeature(FeatureItem.EnterpriseUpsell); + const cloudForTeamsUpsell = useFeature(FeatureItem.CloudForTeamsUpsell); + + return ( + + + + {enterpriseUpsell && } + {cloudForTeamsUpsell && } + + + + + + + + + + + + + + + + + + + + Blurred connector form + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/RowFilterRow.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/RowFilterRow.tsx new file mode 100644 index 00000000000..ec01ff9d0fd --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/RowFilterRow.tsx @@ -0,0 +1,175 @@ +import { useMemo } from "react"; +import { Controller, FormProvider, useForm, useFormContext } from "react-hook-form"; +import { FormattedMessage, useIntl } from "react-intl"; +import { v4 as uuidv4 } from "uuid"; +import * as yup from "yup"; + +import { FlexContainer } from "components/ui/Flex"; +import { Input } from "components/ui/Input"; +import { ListBox } from "components/ui/ListBox"; +import { Text } from "components/ui/Text"; + +import { ConfiguredStreamMapper, StreamMapperType } from "core/api/types/AirbyteClient"; + +import { autoSubmitResolver } from "./autoSubmitResolver"; +import { useMappingContext } from "./MappingContext"; +import styles from "./MappingRow.module.scss"; +import { MappingTypeListBox } from "./MappingTypeListBox"; +import { SelectTargetField } from "./SelectTargetField"; +import { useGetFieldsInStream } from "./useGetFieldsInStream"; + +export enum OperationType { + equal = "EQUAL", + not = "NOT", +} + +enum FilterCondition { + IN = "IN", + OUT = "OUT", +} + +export interface RowFilteringMapperFormValues { + type: StreamMapperType; + configuration: { + id: string; + condition: FilterCondition; + fieldName: string; + comparisonValue: string; + }; +} + +const simpleSchema: yup.SchemaOf = yup.object({ + type: yup.mixed().oneOf([StreamMapperType["row-filtering"]]).required(), + configuration: yup.object({ + id: yup.string().required(), + condition: yup.mixed().oneOf([FilterCondition.IN, FilterCondition.OUT]).required(), + fieldName: yup.string().required(), + comparisonValue: yup.string().required(), + }), +}); + +function formValuesToMapperConfiguration(values: RowFilteringMapperFormValues): ConfiguredStreamMapper { + if (values.configuration.condition === FilterCondition.OUT) { + return { + type: StreamMapperType["row-filtering"], + mapperConfiguration: { + id: values.configuration.id, + conditions: { + type: "NOT", + conditions: [ + { + type: "EQUAL", + fieldName: values.configuration.fieldName, + comparisonValue: values.configuration.comparisonValue, + }, + ], + }, + }, + }; + } + return { + type: StreamMapperType["row-filtering"], + mapperConfiguration: { + id: values.configuration.id, + conditions: { + type: "EQUAL", + fieldName: values.configuration.fieldName, + 
comparisonValue: values.configuration.comparisonValue, + }, + }, + }; +} +interface RowFilteringRowProps { + mappingId: string; + streamName: string; +} + +export const RowFilteringRow: React.FC = ({ mappingId, streamName }) => { + const { formatMessage } = useIntl(); + const { updateLocalMapping, streamsWithMappings, validateMappings } = useMappingContext(); + const mapping = streamsWithMappings[streamName].find((m) => m.mapperConfiguration.id === mappingId); + const fieldsInStream = useGetFieldsInStream(streamName); + + const defaultValues = useMemo(() => { + return { + type: StreamMapperType["row-filtering"], + configuration: { + id: mapping?.mapperConfiguration.id ?? uuidv4(), + type: mapping?.mapperConfiguration.type ?? FilterCondition.IN, + fieldName: mapping?.mapperConfiguration.fieldName ?? "", + conditionValue: mapping?.mapperConfiguration.conditionValue ?? "", + }, + }; + }, [mapping]); + + const methods = useForm({ + defaultValues, + resolver: autoSubmitResolver(simpleSchema, (values) => { + const mapperConfiguration = formValuesToMapperConfiguration(values); + updateLocalMapping(streamName, mapperConfiguration); + validateMappings(); + }), + mode: "onBlur", + }); + + if (!mapping) { + return null; + } + + return ( + +
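The IN/OUT choice in RowFilterRow is a UI convenience: formValuesToMapperConfiguration emits the same EQUAL condition either way and simply wraps it in a NOT node for OUT. A sketch of the two resulting configurations, with made-up field values:

// Illustrative outputs of formValuesToMapperConfiguration for the two filter directions.
// The condition-tree shape ("EQUAL", optionally wrapped in "NOT") is taken from the function above;
// the field name and comparison value are made-up examples.

// condition === "IN": keep only rows where status === "active"
export const keepMatchingRows = {
  type: "row-filtering",
  mapperConfiguration: {
    id: "example-id",
    conditions: { type: "EQUAL", fieldName: "status", comparisonValue: "active" },
  },
};

// condition === "OUT": drop rows where status === "active"
export const dropMatchingRows = {
  type: "row-filtering",
  mapperConfiguration: {
    id: "example-id",
    conditions: {
      type: "NOT",
      conditions: [{ type: "EQUAL", fieldName: "status", comparisonValue: "active" }],
    },
  },
};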
+ + + + + + + + name="configuration.fieldName" + targetFieldOptions={fieldsInStream} + /> + + + + + +
+
+ ); +}; + +const SelectFilterType = () => { + const { control } = useFormContext(); + + const { formatMessage } = useIntl(); + + return ( + ( + + selectedValue={field.value} + options={[ + { label: formatMessage({ id: "connections.mappings.rowFilter.in" }), value: FilterCondition.IN }, + { label: formatMessage({ id: "connections.mappings.rowFilter.out" }), value: FilterCondition.OUT }, + ]} + onSelect={(selectedValue) => { + field.onChange(selectedValue); + // We're using onBlur mode, so we need to manually trigger the validation + field.onBlur(); + }} + /> + )} + /> + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/SelectTargetField.module.scss b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/SelectTargetField.module.scss new file mode 100644 index 00000000000..deeb22914c4 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/SelectTargetField.module.scss @@ -0,0 +1,63 @@ +@use "scss/variables"; +@use "scss/colors"; +@use "scss/z-indices"; + +.comboboxInput { + padding: variables.$spacing-md; + border-radius: variables.$border-radius-xs; + font-size: variables.$font-size-md; + line-height: 1.2; + cursor: pointer; + + &:focus { + background-color: colors.$white; + + &::placeholder { + color: colors.$grey-300; + text-align: left; + font-weight: 400; + } + } +} + +.caretButton { + position: absolute; + right: 0; + top: 0; + height: 100%; + background-color: transparent; + border: none; + cursor: pointer; + color: colors.$grey-300; + padding-inline: 8px; +} + +.comboboxOptions { + z-index: z-indices.$listBox; + background-color: colors.$foreground; + border-radius: variables.$border-radius-lg; + box-shadow: variables.$box-shadow-raised; + padding: 0; + margin-top: variables.$spacing-sm; + width: variables.$width-extra-wide-menu; + max-height: variables.$height-long-listbox-options-list; + list-style-type: none; + overflow-y: auto; +} + +.comboboxOption { + padding: variables.$spacing-md variables.$spacing-lg; + cursor: pointer; + word-break: break-all; + + &:hover, + &:focus-within { + background-color: colors.$grey-50; + height: 100%; + width: 100%; + } +} + +.selected { + background-color: colors.$blue-50; +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/SelectTargetField.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/SelectTargetField.tsx new file mode 100644 index 00000000000..2cc845ee54f --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/SelectTargetField.tsx @@ -0,0 +1,122 @@ +import { Combobox, ComboboxButton, ComboboxInput, ComboboxOption, ComboboxOptions } from "@headlessui/react"; +import { Float } from "@headlessui-float/react"; +import classNames from "classnames"; +import { Fragment, useState } from "react"; +import { Controller, FieldValues, Path, useFormContext } from "react-hook-form"; +import { useIntl } from "react-intl"; + +import { FlexContainer } from "components/ui/Flex"; +import { Icon } from "components/ui/Icon"; +import { Input } from "components/ui/Input"; +import { Text } from "components/ui/Text"; + +import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; + +import styles from "./SelectTargetField.module.scss"; + +export interface SelectFieldOption { + fieldName: string; + fieldType: string; + airbyteType?: string; +} + +interface SelectTargetFieldProps { + targetFieldOptions: SelectFieldOption[]; + name: Path; +} + +export const SelectTargetField = ({ + targetFieldOptions, + name, 
+}: SelectTargetFieldProps) => { + const { control } = useFormContext(); + + return ( + ( + { + field.onChange(value); + // We're using onBlur mode, so we need to manually trigger the validation + field.onBlur(); + }} + /> + )} + /> + ); +}; + +export interface TargetFieldOption { + fieldName: string; + fieldType: string; + airbyteType?: string; +} + +interface FieldComboBoxProps { + onSelectField: (fieldName: string) => void; + options: TargetFieldOption[]; + selectedFieldName?: string; +} + +const ComboBox: React.FC = ({ onSelectField, options, selectedFieldName }) => { + const { mode } = useConnectionFormService(); + const [query, setQuery] = useState(""); + const { formatMessage } = useIntl(); + + const filteredOptions = query === "" ? options : options.filter((option) => option.fieldName.includes(query)); + + const handleFieldNameSelect = (fieldName: string) => { + onSelectField(fieldName); + }; + + return ( + setQuery("")} + > + + + setQuery(e.target.value)} + adornment={ + + + + } + /> + + + {filteredOptions.map(({ fieldName, fieldType, airbyteType }) => ( + + {({ selected }) => ( + + {fieldName} + + {airbyteType || fieldType} + + + )} + + ))} + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/StreamMappingsCard.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/StreamMappingsCard.tsx new file mode 100644 index 00000000000..c89f35e74f0 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/StreamMappingsCard.tsx @@ -0,0 +1,72 @@ +import { + DndContext, + DragEndEvent, + KeyboardSensor, + PointerSensor, + closestCenter, + useSensor, + useSensors, +} from "@dnd-kit/core"; +import { + arrayMove, + SortableContext, + sortableKeyboardCoordinates, + verticalListSortingStrategy, +} from "@dnd-kit/sortable"; +import { FormattedMessage } from "react-intl"; + +import { Button } from "components/ui/Button"; +import { Card } from "components/ui/Card"; +import { FlexContainer } from "components/ui/Flex"; + +import { useMappingContext } from "./MappingContext"; +import { MappingRow } from "./MappingRow"; + +export const StreamMappingsCard: React.FC<{ streamName: string }> = ({ streamName }) => { + const { streamsWithMappings, reorderMappings, addMappingForStream } = useMappingContext(); + const mappingsForStream = streamsWithMappings[streamName]; + const sensors = useSensors( + useSensor(PointerSensor), + useSensor(KeyboardSensor, { + coordinateGetter: sortableKeyboardCoordinates, + }) + ); + + const handleDragEnd = (event: DragEndEvent) => { + const { active, over } = event; + + if (active.id && over?.id && active.id !== over.id) { + const oldIndex = mappingsForStream.findIndex((mapping) => mapping.mapperConfiguration.id === active.id); + const newIndex = mappingsForStream.findIndex((mapping) => mapping.mapperConfiguration.id === over.id); + + if (oldIndex !== -1 && newIndex !== -1) { + const updatedOrder = arrayMove(mappingsForStream, oldIndex, newIndex); + reorderMappings(streamName, updatedOrder); + } + } + }; + + return ( + + + mapping.mapperConfiguration.id)} + strategy={verticalListSortingStrategy} + > + + {mappingsForStream.map((mapping) => ( + + ))} + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/autoSubmitResolver.ts b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/autoSubmitResolver.ts new file mode 100644 index 00000000000..6802734feab --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/autoSubmitResolver.ts @@ -0,0 +1,30 @@ 
+import { FieldValues, Resolver } from "react-hook-form"; +import * as yup from "yup"; +import { AssertsShape } from "yup/lib/object"; + +export function autoSubmitResolver( + schema: yup.SchemaOf | ReturnType>>, + onSubmit: (formValues: AssertsShape) => void +): Resolver { + return async (values) => { + try { + schema.validateSync(values); + onSubmit(values); + } catch (e) { + if (!(e instanceof yup.ValidationError)) { + throw e; + } + + // TODO: parse yup.ValidationError and create a rhf FieldErrors object + console.log(e); + return { + values: {}, + errors: {}, + }; + } + return { + values, + errors: {}, + }; + }; +} diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/index.ts b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/index.ts new file mode 100644 index 00000000000..4ca1ae15ac3 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/index.ts @@ -0,0 +1 @@ +export { ConnectionMappingsPage as default } from "./ConnectionMappingsPage"; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/mappings_screenshot.png b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/mappings_screenshot.png new file mode 100644 index 00000000000..7563d7d72f9 Binary files /dev/null and b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/mappings_screenshot.png differ diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetFieldsInStream.ts b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetFieldsInStream.ts new file mode 100644 index 00000000000..a3aa7b66ef2 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetFieldsInStream.ts @@ -0,0 +1,23 @@ +/** + * todo: this lists all fields for a stream (maybe obvious) however, in reality we want all fields for a stream at the point of this mapping + * that PR is in flight to be added at a (very soon) later time + * https://github.com/airbytehq/airbyte-internal-issues/issues/11010 + */ + +import { useCurrentConnection } from "core/api"; +import { traverseSchemaToField } from "core/domain/catalog"; + +export const useGetFieldsInStream = (streamName: string) => { + const { syncCatalog } = useCurrentConnection(); + + if (!syncCatalog) { + return []; + } + const stream = syncCatalog.streams.find((s) => s.stream?.name === streamName); + + return traverseSchemaToField(stream?.stream?.jsonSchema, streamName).map((field) => ({ + fieldName: field.cleanedName, + fieldType: field.type, + airbyteType: field.airbyte_type, + })); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetMappingsForCurrentConnection.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetMappingsForCurrentConnection.tsx new file mode 100644 index 00000000000..ae5aaf88b60 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetMappingsForCurrentConnection.tsx @@ -0,0 +1,24 @@ +import { v4 as uuidv4 } from "uuid"; + +import { useCurrentConnection } from "core/api"; +import { ConfiguredStreamMapper } from "core/api/types/AirbyteClient"; + +export const useGetMappingsForCurrentConnection = (): Record => { + const connection = useCurrentConnection(); + + const mappings: Record = {}; + + connection.syncCatalog?.streams.forEach((streamItem) => { + if (streamItem.config?.mappers && streamItem.config.mappers.length > 0) { + mappings[streamItem.stream?.name ?? 
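autoSubmitResolver is what makes every mapping row save-as-you-edit: useForm runs it on each blur, and when the yup schema passes it calls the supplied onSubmit instead of waiting for a submit button. A minimal sketch of wiring it into an unrelated form; the schema and field here are invented, only the resolver comes from this PR:

import React from "react";
import { useForm } from "react-hook-form";
import * as yup from "yup";

// Assumed to resolve to the helper added in this PR.
import { autoSubmitResolver } from "./autoSubmitResolver";

interface NicknameFormValues {
  nickname: string;
}

const schema: yup.SchemaOf<NicknameFormValues> = yup.object().shape({
  nickname: yup.string().required(),
});

// Hypothetical form: each time the field blurs with a valid value, onSubmit fires automatically.
export const NicknameForm: React.FC<{ onSubmit: (values: NicknameFormValues) => void }> = ({ onSubmit }) => {
  const { register } = useForm<NicknameFormValues>({
    defaultValues: { nickname: "" },
    resolver: autoSubmitResolver(schema, onSubmit),
    mode: "onBlur",
  });

  return <input {...register("nickname")} />;
};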
""] = streamItem.config.mappers.map((mapper) => ({ + ...mapper, + mapperConfiguration: { + ...mapper.mapperConfiguration, + id: uuidv4(), + }, + })); + } + }); + + return mappings; +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetStreamsForNewMappings.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetStreamsForNewMappings.tsx new file mode 100644 index 00000000000..98ada0f5498 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useGetStreamsForNewMappings.tsx @@ -0,0 +1,24 @@ +import { useCurrentConnection } from "core/api"; + +import { useMappingContext } from "./MappingContext"; + +export const useGetStreamsForNewMapping = () => { + const connection = useCurrentConnection(); + const { streamsWithMappings } = useMappingContext(); + + return connection?.syncCatalog.streams?.filter((stream) => { + if (!stream.stream?.name) { + return false; + } + + if (streamsWithMappings[stream.stream?.name]) { + return false; + } + + if (!stream.config?.selected) { + return false; + } + + return true; + }); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useUpdateMappingsForCurrentConnection.tsx b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useUpdateMappingsForCurrentConnection.tsx new file mode 100644 index 00000000000..3934f87856e --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionMappingsPage/useUpdateMappingsForCurrentConnection.tsx @@ -0,0 +1,95 @@ +import isBoolean from "lodash/isBoolean"; +import { useIntl } from "react-intl"; + +import { useDestinationDefinitionVersion, useGetConnection, useGetStateTypeQuery } from "core/api"; +import { AirbyteStreamAndConfiguration, ConfiguredStreamMapper } from "core/api/types/AirbyteClient"; +import { trackError } from "core/utils/datadog"; +import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; +import { ModalResult, useModalService } from "hooks/services/Modal"; +import { useNotificationService } from "hooks/services/Notification"; + +import { ClearDataWarningModal } from "../ConnectionReplicationPage/ClearDataWarningModal"; +import { RecommendRefreshModal } from "../ConnectionReplicationPage/RecommendRefreshModal"; +export const useUpdateMappingsForCurrentConnection = (connectionId: string) => { + const connection = useGetConnection(connectionId); + const { supportsRefreshes: destinationSupportsRefreshes } = useDestinationDefinitionVersion( + connection.destination.destinationId + ); + const { registerNotification } = useNotificationService(); + + const getStateType = useGetStateTypeQuery(); + const { formatMessage } = useIntl(); + const { openModal } = useModalService(); + const { updateConnection } = useConnectionEditService(); + + const updateMappings = async (updatedMappings: Record) => { + const updatedCatalog: AirbyteStreamAndConfiguration[] = connection.syncCatalog.streams.map( + (streamWithConfig: AirbyteStreamAndConfiguration) => { + const streamName = streamWithConfig.stream?.name; + if (streamName && updatedMappings[streamName] && streamWithConfig.config) { + return { + ...streamWithConfig, + config: { + ...streamWithConfig.config, + mappers: updatedMappings[streamName], + }, + }; + } + return streamWithConfig; + } + ); + + async function handleModalResult(result: ModalResult) { + if (result.type === "completed" && isBoolean(result.reason)) { + await updateConnection({ + connectionId: connection.connectionId, + syncCatalog: { + streams: updatedCatalog, + }, + 
skipReset: !result.reason, + }) + .then(() => { + registerNotification({ + id: "connection_settings_change_success", + text: formatMessage({ id: "form.changesSaved" }), + type: "success", + }); + }) + .catch((e: Error) => { + trackError(e, { connectionName: connection.name }); + registerNotification({ + id: "connection_settings_change_error", + text: formatMessage({ id: "connection.updateFailed" }), + type: "error", + }); + }); + } else { + return Promise.reject(); + } + return Promise.resolve(); + } + + try { + if (!destinationSupportsRefreshes) { + const stateType = await getStateType(connection.connectionId); + const result = await openModal({ + title: formatMessage({ id: "connection.clearDataRecommended" }), + size: "md", + content: (props) => , + }); + await handleModalResult(result); + } else { + const result = await openModal({ + title: formatMessage({ id: "connection.refreshDataRecommended" }), + size: "md", + content: ({ onCancel, onComplete }) => , + }); + await handleModalResult(result); + } + } catch (e) { + throw new Error(e); + } + }; + + return { updateMappings }; +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx b/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx index ba99090d631..1c8384fb7f6 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionPage/ConnectionPageHeader.tsx @@ -10,6 +10,7 @@ import { Tabs, LinkTab } from "components/ui/Tabs"; import { FeatureItem, useFeature } from "core/services/features"; import { useConnectionEditService } from "hooks/services/ConnectionEdit/ConnectionEditService"; +import { useExperiment } from "hooks/services/Experiment"; import { RoutePaths, ConnectionRoutePaths } from "pages/routePaths"; import { ConnectionTitleBlock } from "./ConnectionTitleBlock"; @@ -20,6 +21,7 @@ export const ConnectionPageHeader = () => { const { formatMessage } = useIntl(); const currentTab = params["*"] || ConnectionRoutePaths.Status; const supportsDbtCloud = useFeature(FeatureItem.AllowDBTCloudIntegration); + const mappingsUIEnabled = useExperiment("connection.mappingsUI"); const { connection, schemaRefreshing } = useConnectionEditService(); const breadcrumbsData = [ @@ -56,6 +58,16 @@ export const ConnectionPageHeader = () => { to: `${basePath}/${ConnectionRoutePaths.Replication}`, disabled: schemaRefreshing, }, + ...(mappingsUIEnabled + ? [ + { + id: ConnectionRoutePaths.Mappings, + name: , + to: `${basePath}/${ConnectionRoutePaths.Mappings}`, + disabled: schemaRefreshing, + }, + ] + : []), ...(supportsDbtCloud ? 
[ { @@ -75,7 +87,7 @@ export const ConnectionPageHeader = () => { ]; return tabs; - }, [basePath, schemaRefreshing, connection.schemaChange, supportsDbtCloud]); + }, [basePath, schemaRefreshing, connection.schemaChange, mappingsUIEnabled, supportsDbtCloud]); return ( diff --git a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.module.scss b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.module.scss index 202d2280b0e..a3fe47a82c9 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.module.scss +++ b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.module.scss @@ -13,8 +13,3 @@ height: 100%; } } - -.scrollableContainer { - height: 100%; - padding: variables.$spacing-xl; -} diff --git a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.tsx index 15dca71540f..62d60640641 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/ConnectionReplicationPage.tsx @@ -41,15 +41,12 @@ import { useAnalyticsTrackFunctions } from "./useAnalyticsTrackFunctions"; const SchemaChangeMessage: React.FC = () => { const { isDirty } = useFormState(); const refreshWithConfirm = useRefreshSourceSchemaWithConfirmationOnDirty(isDirty); - const { refreshSchema } = useConnectionFormService(); const { connection, schemaHasBeenRefreshed, schemaRefreshing, connectionUpdating } = useConnectionEditService(); const { hasNonBreakingSchemaChange, hasBreakingSchemaChange } = useSchemaChanges(connection.schemaChange); - if (schemaHasBeenRefreshed) { return null; } - if (hasNonBreakingSchemaChange && !schemaRefreshing) { return ( { /> ); } - if (hasBreakingSchemaChange && !schemaRefreshing) { return ( { } return null; }; - const relevantConnectionKeys = [ "syncCatalog" as const, "namespaceDefinition" as const, @@ -212,7 +207,6 @@ export const ConnectionReplicationPage: React.FC = () => { useUnmount(() => { discardRefreshedSchema(); }); - const { state } = useLocation(); useEffect(() => { if (typeof state === "object" && state && "triggerRefreshSchema" in state && state.triggerRefreshSchema) { @@ -239,7 +233,7 @@ export const ConnectionReplicationPage: React.FC = () => { return (
- + defaultValues={initialValues} reinitializeDefaultValues diff --git a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap index 8ac50916513..ba7262d60fb 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap +++ b/airbyte-webapp/src/pages/connections/ConnectionReplicationPage/__snapshots__/ConnectionReplicationPage.test.tsx.snap @@ -497,26 +497,21 @@ exports[`ConnectionReplicationPage should render 1`] = ` id="headlessui-listbox-button-:rk:" type="button" > - + Full refresh | Overwrite +

+ +
- + Full refresh | Overwrite +

+ +
- + Full refresh | Overwrite +

+ +
- + Full refresh | Overwrite +

+ +
0 && + !equal(formStream.config?.mappers, connectionStream.config?.mappers); + + return promptBecauseOfSyncModes || promptBecauseOfHashing || promptBecauseOfMappingsChanges; }); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx index c9d5147f394..ae3a1abce78 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/ConnectionSettingsPage.tsx @@ -1,6 +1,7 @@ import { Disclosure, DisclosureButton, DisclosurePanel } from "@headlessui/react"; import classnames from "classnames"; -import React from "react"; +import React, { useCallback } from "react"; +import { UseFormReturn } from "react-hook-form"; import { FormattedMessage, useIntl } from "react-intl"; import { @@ -9,15 +10,16 @@ import { useInitialFormValues, } from "components/connection/ConnectionForm/formConfig"; import { ConnectionSyncContextProvider } from "components/connection/ConnectionSync/ConnectionSyncContext"; +import { I18N_KEY_UNDER_ONE_HOUR_NOT_ALLOWED } from "components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionScheduleFormField"; import { SimplifiedConnectionsSettingsCard } from "components/connection/CreateConnectionForm/SimplifiedConnectionCreation/SimplifiedConnectionSettingsCard"; import { Form } from "components/forms"; -import { ScrollableContainer } from "components/ScrollableContainer"; import { Button } from "components/ui/Button"; import { FlexContainer } from "components/ui/Flex"; +import { ScrollParent } from "components/ui/ScrollParent"; import { Spinner } from "components/ui/Spinner"; import { ConnectionActionsBlock } from "area/connection/components/ConnectionActionsBlock"; -import { useCurrentWorkspace } from "core/api"; +import { HttpError, HttpProblem, useCurrentWorkspace } from "core/api"; import { Geography, WebBackendConnectionUpdate } from "core/api/types/AirbyteClient"; import { PageTrackingCodes, useTrackPage } from "core/services/analytics"; import { trackError } from "core/utils/datadog"; @@ -39,6 +41,7 @@ export const ConnectionSettingsPage: React.FC = () => { useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM_SETTINGS); const { connection, updateConnection } = useConnectionEditService(); + const { defaultGeography } = useCurrentWorkspace(); const { formatMessage } = useIntl(); const { registerNotification } = useNotificationService(); @@ -58,19 +61,31 @@ export const ConnectionSettingsPage: React.FC = () => { }); }; - const onError = (e: Error, { name }: FormConnectionFormValues) => { - trackError(e, { connectionName: name }); - registerNotification({ - id: "connection_settings_change_error", - text: formatMessage({ id: "connection.updateFailed" }), - type: "error", - }); - }; + const onError = useCallback( + (error: Error, values: FormConnectionFormValues, methods: UseFormReturn) => { + trackError(error, { connectionName: values.name }); + if (error instanceof HttpError && HttpProblem.isType(error, "error:cron-validation/under-one-hour-not-allowed")) { + methods.setError("scheduleData.cron.cronExpression", { + message: I18N_KEY_UNDER_ONE_HOUR_NOT_ALLOWED, + }); + } + registerNotification({ + id: "connection_settings_change_error", + text: formatMessage({ id: "connection.updateFailed" }), + type: "error", + }); + }, + [formatMessage, registerNotification] + ); const isDeprecated = connection.status === "deprecated"; 
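The new onError handler in ConnectionSettingsPage turns one specific API problem (cron expressions under an hour) into an inline field error rather than only a toast. A generic sketch of that pattern; the problemType check below is a stand-in, since HttpError/HttpProblem are internal helpers not shown here:

import { FieldValues, Path, UseFormReturn } from "react-hook-form";

// Stand-in error shape; the real check uses HttpError/HttpProblem.isType from core/api.
interface ProblemLikeError extends Error {
  problemType?: string;
}

// Map one known API problem onto a specific form field; everything else goes to the fallback handler.
export function applyApiErrorToForm<TFormValues extends FieldValues>(
  error: ProblemLikeError,
  methods: UseFormReturn<TFormValues>,
  field: Path<TFormValues>,
  expectedProblemType: string,
  message: string,
  fallback: (error: Error) => void
): void {
  if (error.problemType === expectedProblemType) {
    methods.setError(field, { message });
    return;
  }
  fallback(error);
}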
+ const hasConfiguredGeography = + connection.geography !== undefined && + connection.geography !== defaultGeography && + connection.geography !== Geography.auto; return ( - + trackDirtyChanges @@ -83,8 +98,8 @@ export const ConnectionSettingsPage: React.FC = () => { return updateConnection(connectionUpdates); }} - onError={onError} onSuccess={onSuccess} + onError={onError} schema={validationSchema} defaultValues={simplifiedInitialValues} > @@ -94,6 +109,7 @@ export const ConnectionSettingsPage: React.FC = () => { destination={connection.destination} isCreating={false} isDeprecated={isDeprecated} + hasConfiguredGeography={hasConfiguredGeography} /> @@ -124,6 +140,6 @@ export const ConnectionSettingsPage: React.FC = () => { )} - + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/StateBlock.tsx b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/StateBlock.tsx index 444c770b659..0f14fa667af 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/StateBlock.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionSettingsPage/StateBlock.tsx @@ -126,7 +126,6 @@ export const StateBlock: React.FC = ({ connectionId, disabled } value={stateDraft ?? existingStateString} height={styles.stateEditorHeight} language="json" - automaticLayout showSuggestions={false} onChange={(value) => { setStateDraft(value ?? ""); diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineAllEventsList.test.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineAllEventsList.test.tsx index 84f47a051f3..8e51471e7e6 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineAllEventsList.test.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineAllEventsList.test.tsx @@ -38,6 +38,24 @@ describe("#validateAndMapEvent", () => { disabledReason: "SCHEMA_CHANGES_ARE_BREAKING", }, }, + { + id: "2", + createdAt: 1728936015, + eventType: ConnectionEventType.CONNECTION_DISABLED, + connectionId: mockConnection.connectionId, + summary: { + disabledReason: "INVALID_PAYMENT_METHOD", + }, + }, + { + id: "2", + createdAt: 1728936015, + eventType: ConnectionEventType.CONNECTION_DISABLED, + connectionId: mockConnection.connectionId, + summary: { + disabledReason: "INVOICE_MARKED_UNCOLLECTIBLE", + }, + }, { id: "3", createdAt: 1728936020, @@ -79,7 +97,7 @@ describe("#validateAndMapEvent", () => { const validatedEvents = validTimelineEvents .map((event) => validateAndMapEvent(event)) .filter((event) => event !== null); - expect(validatedEvents).toHaveLength(4); + expect(validatedEvents).toHaveLength(validTimelineEvents.length); }); it("removes invalid events from list and triggers error tracking", () => { const eventsWithInvalidEvent = [ @@ -108,7 +126,7 @@ describe("#validateAndMapEvent", () => { const validatedEvents = eventsWithInvalidEvent .map((event) => validateAndMapEvent(event)) .filter((event) => event !== null); - expect(validatedEvents).toHaveLength(4); + expect(validatedEvents).toHaveLength(validTimelineEvents.length); expect(trackError).toHaveBeenCalledTimes(1); }); it("removes events with only resourceRequirement patches but does not trigger error handling", () => { @@ -139,7 +157,7 @@ describe("#validateAndMapEvent", () => { const validatedEvents = eventsWithResourceUpdate .map((event) => validateAndMapEvent(event)) .filter((event) => event !== null); - expect(validatedEvents).toHaveLength(4); + 
expect(validatedEvents).toHaveLength(validTimelineEvents.length); expect(trackError).toHaveBeenCalledTimes(0); }); }); diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineAllEventsList.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineAllEventsList.tsx index d7198c9a8ea..b8c030225f3 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineAllEventsList.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/ConnectionTimelineAllEventsList.tsx @@ -1,7 +1,7 @@ -import { HTMLAttributes, Ref, forwardRef, useEffect, useMemo } from "react"; +import { HTMLAttributes, Ref, forwardRef, useContext, useEffect, useMemo } from "react"; import { FormattedMessage } from "react-intl"; import { Virtuoso } from "react-virtuoso"; -import { InferType } from "yup"; +import { InferType, SchemaOf } from "yup"; import { LoadingPage } from "components"; import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; @@ -9,6 +9,7 @@ import { EmptyState } from "components/EmptyState"; import { Box } from "components/ui/Box"; import { FlexContainer } from "components/ui/Flex"; import { LoadingSpinner } from "components/ui/LoadingSpinner"; +import { ScrollParentContext } from "components/ui/ScrollParent"; import { useCurrentConnection, useGetConnectionSyncProgress, useListConnectionEventsInfinite } from "core/api"; import { ConnectionEvent, ConnectionSyncStatus } from "core/api/types/AirbyteClient"; @@ -18,10 +19,13 @@ import { ClearEventItem } from "./components/ClearEventItem"; import { ConnectionDisabledEventItem } from "./components/ConnectionDisabledEventItem"; import { ConnectionEnabledEventItem } from "./components/ConnectionEnabledEventItem"; import { ConnectionSettingsUpdateEventItem } from "./components/ConnectionSettingsUpdateEventItem"; +import { DestinationConnectorUpdateEventItem } from "./components/DestinationConnectorUpdateEventItem"; import { JobStartEventItem } from "./components/JobStartEventItem"; +import { MappingEventItem } from "./components/MappingEventItem"; import { RefreshEventItem } from "./components/RefreshEventItem"; import { RunningJobItem } from "./components/RunningJobItem"; import { SchemaUpdateEventItem } from "./components/SchemaUpdateEventItem"; +import { SourceConnectorUpdateEventItem } from "./components/SourceConnectorUpdateEventItem"; import { SyncEventItem } from "./components/SyncEventItem"; import { SyncFailEventItem } from "./components/SyncFailEventItem"; import styles from "./ConnectionTimelineAllEventsList.module.scss"; @@ -31,78 +35,67 @@ import { connectionDisabledEventSchema, connectionEnabledEventSchema, connectionSettingsUpdateEventSchema, + destinationConnectorUpdateEventSchema, generalEventSchema, jobRunningSchema, jobStartedEventSchema, + mappingEventSchema, refreshEventSchema, schemaUpdateEventSchema, + sourceConnectorUpdateEventSchema, syncEventSchema, syncFailEventSchema, } from "./types"; import { eventTypeByStatusFilterValue, TimelineFilterValues, eventTypeByTypeFilterValue } from "./utils"; +type AllSchemaEventTypes = + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType + | InferType; + +interface EventSchemaComponentMapItem { + schema: SchemaOf; + component: React.FC<{ event: T }>; +} + +const eventSchemaComponentMap = [ + { schema: jobRunningSchema, component: RunningJobItem 
}, + { schema: syncEventSchema, component: SyncEventItem }, + { schema: syncFailEventSchema, component: SyncFailEventItem }, + { schema: refreshEventSchema, component: RefreshEventItem }, + { schema: clearEventSchema, component: ClearEventItem }, + { schema: jobStartedEventSchema, component: JobStartEventItem }, + { schema: connectionEnabledEventSchema, component: ConnectionEnabledEventItem }, + { schema: connectionDisabledEventSchema, component: ConnectionDisabledEventItem }, + { schema: connectionSettingsUpdateEventSchema, component: ConnectionSettingsUpdateEventItem }, + { schema: schemaUpdateEventSchema, component: SchemaUpdateEventItem }, + { schema: sourceConnectorUpdateEventSchema, component: SourceConnectorUpdateEventItem }, + { schema: destinationConnectorUpdateEventSchema, component: DestinationConnectorUpdateEventItem }, + { schema: mappingEventSchema, component: MappingEventItem }, +] as Array>; + export const validateAndMapEvent = (event: ConnectionEvent | ConnectionTimelineRunningEvent) => { - if (jobRunningSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (syncEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (syncFailEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (refreshEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (clearEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (jobStartedEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (connectionEnabledEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (connectionDisabledEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (connectionSettingsUpdateEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); - } else if (schemaUpdateEventSchema.isValidSync(event, { recursive: true, stripUnknown: true })) { - return ( - - - - ); + for (const { schema, component: Component } of eventSchemaComponentMap) { + if (schema.isValidSync(event, { recursive: true, stripUnknown: true })) { + return ( + + + + ); + } } + /** * known cases for excluding timeline events that we should not trigger error reporting for: * - events with only resourceRequirement patches @@ -135,8 +128,8 @@ UlList.displayName = "UlList"; export const ConnectionTimelineAllEventsList: React.FC<{ filterValues: TimelineFilterValues; - scrollElement: HTMLDivElement | null; -}> = ({ filterValues, scrollElement }) => { +}> = ({ filterValues }) => { + const customScrollParent = useContext(ScrollParentContext); const connection = useCurrentConnection(); const { status } = useConnectionStatus(connection.connectionId); const { data: syncProgressData } = useGetConnectionSyncProgress( @@ -243,7 +236,7 @@ export const ConnectionTimelineAllEventsList: React.FC<{ <> = ({ eve }; export const ConnectionTimelinePage = () => { - const [scrollElement, setScrollElement] = useState(null); useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM_TIMELINE); const connection = useCurrentConnection(); const { openModal } = useModalService(); @@ -53,15 +51,14 @@ export const ConnectionTimelinePage = () => { openModal, jobId: !isNaN(jobIdFromFilter) ? 
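With validateAndMapEvent refactored around eventSchemaComponentMap, supporting another timeline event means appending one { schema, component } pair instead of adding another else-if branch. A sketch of such an entry in isolation; the event type and its fields are invented, and real entries use the schemas from ./types:

import React from "react";
import * as yup from "yup";

// Hypothetical timeline event: the type name and fields are invented for illustration only.
const scheduleResumedEventSchema = yup.object({
  id: yup.string().required(),
  eventType: yup.mixed().oneOf(["SCHEDULE_RESUMED"]).required(),
  createdAt: yup.number().required(),
});

type ScheduleResumedEvent = yup.InferType<typeof scheduleResumedEventSchema>;

const ScheduleResumedEventItem: React.FC<{ event: ScheduleResumedEvent }> = ({ event }) => (
  <div>Schedule resumed at {new Date(event.createdAt * 1000).toLocaleString()}</div>
);

// Appending this pair to eventSchemaComponentMap is all the list needs; the
// schema.isValidSync(..., { stripUnknown: true }) call in validateAndMapEvent handles the routing.
export const scheduleResumedEntry = {
  schema: scheduleResumedEventSchema,
  component: ScheduleResumedEventItem,
};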
jobIdFromFilter : undefined, eventId: filterValues.eventId, - connectionName: connection.name, + connection, attemptNumber: !isNaN(attemptNumberFromFilter) ? attemptNumberFromFilter : undefined, - connectionId: connection.connectionId, setFilterValue, }); } return ( - + @@ -84,12 +81,12 @@ export const ConnectionTimelinePage = () => { {filterValues.eventId ? ( ) : ( - + )} - + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.module.scss b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.module.scss index fa1fac4558b..14dc8501802 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.module.scss +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.module.scss @@ -1,12 +1,11 @@ +@use "scss/variables"; + .modalLoading { position: relative; flex-grow: 1; display: flex; justify-content: center; align-items: center; -} - -// fix for "dancing" scrollbar -.spinnerContainer { - overflow: clip; + flex-direction: column; + gap: variables.$spacing-lg; } diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.tsx index fcb9ef5fbc6..e02e2bb3ace 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobEventMenu.tsx @@ -3,21 +3,13 @@ import { FormattedMessage, useIntl } from "react-intl"; import { Button } from "components/ui/Button"; import { DropdownMenu, DropdownMenuOptionType } from "components/ui/DropdownMenu"; -import { FlexContainer } from "components/ui/Flex"; -import { LoadingSpinner } from "components/ui/LoadingSpinner"; import { Spinner } from "components/ui/Spinner"; +import { Text } from "components/ui/Text"; -import { - attemptHasFormattedLogs, - attemptHasStructuredLogs, - useCurrentConnection, - useCurrentWorkspace, - useGetDebugInfoJobManual, -} from "core/api"; +import { useCurrentConnection, useDonwnloadJobLogsFetchQuery } from "core/api"; +import { WebBackendConnectionRead } from "core/api/types/AirbyteClient"; import { DefaultErrorBoundary } from "core/errors"; import { copyToClipboard } from "core/utils/clipboard"; -import { trackError } from "core/utils/datadog"; -import { FILE_TYPE_DOWNLOAD, downloadFile, fileizeString } from "core/utils/file"; import { ModalOptions, ModalResult, useModalService } from "hooks/services/Modal"; import { useNotificationService } from "hooks/services/Notification"; @@ -35,17 +27,15 @@ export const openJobLogsModal = ({ openModal, jobId, eventId, - connectionName, + connection, attemptNumber, - connectionId, setFilterValue, }: { openModal: (options: ModalOptions) => Promise>; jobId?: number; eventId?: string; - connectionName: string; + connection: WebBackendConnectionRead; attemptNumber?: number; - connectionId: string; setFilterValue?: (filterName: keyof TimelineFilterValues, value: string) => void; }) => { if (!jobId && !eventId) { @@ -54,22 +44,20 @@ export const openJobLogsModal = ({ openModal({ size: "full", - title: , + title: , content: () => ( + + +
} > - + ), @@ -88,10 +76,9 @@ export const JobEventMenu: React.FC<{ eventId?: string; jobId: number; attemptCo const { formatMessage } = useIntl(); const connection = useCurrentConnection(); const { openModal } = useModalService(); - const { registerNotification, unregisterNotificationById } = useNotificationService(); + const { registerNotification } = useNotificationService(); - const { refetch: fetchJobLogs } = useGetDebugInfoJobManual(jobId); - const { name: workspaceName, workspaceId } = useCurrentWorkspace(); + const downloadJobLogs = useDonwnloadJobLogsFetchQuery(); const onChangeHandler = (optionClicked: DropdownMenuOptionType) => { switch (optionClicked.value) { @@ -100,8 +87,7 @@ export const JobEventMenu: React.FC<{ eventId?: string; jobId: number; attemptCo openModal, jobId, eventId, - connectionName: connection.name, - connectionId: connection.connectionId, + connection, }); break; @@ -124,61 +110,7 @@ export const JobEventMenu: React.FC<{ eventId?: string; jobId: number; attemptCo } case JobMenuOptions.DownloadLogs: - const notificationId = `download-logs-${jobId}`; - registerNotification({ - type: "info", - text: ( - - -
- -
-
- ), - id: notificationId, - timeout: false, - }); - // Promise.all() with a timeout is used to ensure that the notification is shown to the user for at least 1 second - Promise.all([ - fetchJobLogs() - .then(({ data }) => { - if (!data) { - throw new Error("No logs returned from server"); - } - const file = new Blob( - [ - data.attempts - .flatMap((info, index) => [ - `>> ATTEMPT ${index + 1}/${data.attempts.length}\n`, - ...(attemptHasFormattedLogs(info) ? info.logs.logLines : []), - ...(attemptHasStructuredLogs(info) ? info.logs.events.map((event) => JSON.stringify(event)) : []), - `\n\n\n`, - ]) - .join("\n"), - ], - { - type: FILE_TYPE_DOWNLOAD, - } - ); - downloadFile(file, fileizeString(`${workspaceName}-logs-${jobId}.txt`)); - }) - .catch((e) => { - trackError(e, { workspaceId, jobId }); - registerNotification({ - type: "error", - text: formatMessage( - { - id: "jobHistory.logs.logDownloadFailed", - }, - { connectionName: connection.name } - ), - id: `download-logs-error-${jobId}`, - }); - }), - new Promise((resolve) => setTimeout(resolve, 1000)), - ]).finally(() => { - unregisterNotificationById(notificationId); - }); + downloadJobLogs(connection.name, jobId); break; } }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobLogsModalContent.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobLogsModalContent.tsx index f53866d918b..71ecbdbbc49 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobLogsModalContent.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/JobLogsModalContent.tsx @@ -1,14 +1,15 @@ import { JobLogsModal } from "area/connection/components/JobLogsModal/JobLogsModal"; import { useGetConnectionEvent } from "core/api"; +import { WebBackendConnectionRead } from "core/api/types/AirbyteClient"; export const JobLogsModalContent: React.FC<{ eventId?: string; jobId?: number; attemptNumber?: number; resetFilters?: () => void; - connectionId: string; -}> = ({ eventId, jobId, attemptNumber, resetFilters, connectionId }) => { - const { data: singleEventItem } = useGetConnectionEvent(eventId ?? null, connectionId); + connection: WebBackendConnectionRead; +}> = ({ eventId, jobId, attemptNumber, resetFilters, connection }) => { + const { data: singleEventItem } = useGetConnectionEvent(eventId ?? 
null, connection.connectionId); const jobIdFromEvent = singleEventItem?.summary.jobId; @@ -21,7 +22,5 @@ export const JobLogsModalContent: React.FC<{ return null; } - return ( - - ); + return ; }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/ClearEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/ClearEventItem.tsx index 00ccf91997e..9e661a7874e 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/ClearEventItem.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/ClearEventItem.tsx @@ -16,13 +16,13 @@ import { clearEventSchema } from "../types"; import { getStatusByEventType, getStatusIcon, titleIdMap } from "../utils"; interface ClearEventProps { - clearEvent: InferType; + event: InferType; } -export const ClearEventItem: React.FC = ({ clearEvent }) => { +export const ClearEventItem: React.FC = ({ event }) => { const [showExtendedStats] = useLocalStorage("airbyte_extended-attempts-stats", false); - const title = titleIdMap[clearEvent.eventType]; - const jobStatus = getStatusByEventType(clearEvent.eventType); - const streamsToList = clearEvent.summary.streams.map((stream) => stream.name); + const title = titleIdMap[event.eventType]; + const jobStatus = getStatusByEventType(event.eventType); + const streamsToList = event.summary.streams.map((stream) => stream.name); return ( @@ -32,32 +32,32 @@ export const ClearEventItem: React.FC = ({ clearEvent }) => { - {jobStatus === "cancelled" && !!clearEvent.user && ( + {jobStatus === "cancelled" && !!event.user && (
- +
)} {streamsToList.length > 0 && } {showExtendedStats && ( <> - + | - + )}
); diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/DestinationConnectorUpdateEventItem.stories.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/DestinationConnectorUpdateEventItem.stories.tsx new file mode 100644 index 00000000000..3d689f8795b --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/DestinationConnectorUpdateEventItem.stories.tsx @@ -0,0 +1,97 @@ +import { StoryObj } from "@storybook/react"; + +import { ConnectorIds } from "area/connector/utils"; +import { ConnectionEventType } from "core/api/types/AirbyteClient"; + +import { DestinationConnectorUpdateEventItem } from "./DestinationConnectorUpdateEventItem"; + +export default { + title: "connection-timeline/DestinationConnectorUpdateEventItem", + component: DestinationConnectorUpdateEventItem, +} as StoryObj; + +const baseEvent = { + id: "fc62442f-1cc1-4a57-a385-11ca7507c649", + connectionId: "a90ab3d6-b5cb-4e43-8c97-5a4ab8f2f7d9", + eventType: ConnectionEventType.CONNECTOR_UPDATE, + summary: { + name: "End-to-End Testing (/dev/null)", + destinationDefinitionId: ConnectorIds.Destinations.EndToEndTesting, + }, + user: { + id: "00000000-0000-0000-0000-000000000000", + email: "volodymyr.s.petrov@globallogic.com", + name: "Volodymyr Petrov", + }, + createdAt: 1732114841, +}; + +export const UpgradedVersionByUser: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "2.1.0", + oldDockerImageTag: "2.0.5", + changeReason: "USER", + }, + }, + }, +}; + +export const UpgradedVersionBySystem: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "2.1.0", + oldDockerImageTag: "2.0.5", + changeReason: "SYSTEM", + }, + }, + }, +}; + +export const DowngradedVersionByUser: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "1.0.1", + oldDockerImageTag: "3.0.0", + changeReason: "USER", + }, + }, + }, +}; + +export const DowngradedVersionBySystem: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "1.0.1", + oldDockerImageTag: "3.0.0", + changeReason: "SYSTEM", + }, + }, + }, +}; + +export const UpdatedVersion: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "4.0", + oldDockerImageTag: "3.5.6-rc1", + changeReason: "USER", + }, + }, + }, +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/DestinationConnectorUpdateEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/DestinationConnectorUpdateEventItem.tsx new file mode 100644 index 00000000000..78cc286f6e3 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/DestinationConnectorUpdateEventItem.tsx @@ -0,0 +1,52 @@ +import { FormattedMessage } from "react-intl"; +import { InferType } from "yup"; + +import { FlexContainer } from "components/ui/Flex"; +import { Text } from "components/ui/Text"; + +import { TimelineEventUser } from "./TimelineEventUser"; +import { ConnectionTimelineEventActions } from "../ConnectionTimelineEventActions"; +import { ConnectionTimelineEventIcon } from "../ConnectionTimelineEventIcon"; +import { ConnectionTimelineEventItem } from "../ConnectionTimelineEventItem"; +import { ConnectionTimelineEventSummary } from "../ConnectionTimelineEventSummary"; +import { 
destinationConnectorUpdateEventSchema } from "../types"; +import { isSemanticVersionTags, isVersionUpgraded } from "../utils"; + +interface ConnectionEnabledEventItemProps { + event: InferType; +} + +export const DestinationConnectorUpdateEventItem: React.FC = ({ event }) => { + const description = "connection.timeline.connector_update.destination.version.description.reason"; + + const messageId = isSemanticVersionTags(event.summary.newDockerImageTag, event.summary.oldDockerImageTag) + ? isVersionUpgraded(event.summary.newDockerImageTag, event.summary.oldDockerImageTag) + ? `${description}.upgraded.${event.summary.changeReason}` + : `${description}.downgraded.${event.summary.changeReason}` + : `${description}.updated.${event.summary.changeReason}`; + + return ( + + + + + + + + + , + destination: event.summary.name, + from: event.summary.oldDockerImageTag, + to: event.summary.newDockerImageTag, + }} + /> + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/JobStartEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/JobStartEventItem.tsx index 1514e0f6e57..802fe1d72a8 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/JobStartEventItem.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/JobStartEventItem.tsx @@ -15,32 +15,32 @@ import { jobStartedEventSchema } from "../types"; import { titleIdMap } from "../utils"; interface JobStartEventItemProps { - jobStartEvent: InferType; + event: InferType; } -export const JobStartEventItem: React.FC = ({ jobStartEvent }) => { - const titleId = titleIdMap[jobStartEvent.eventType]; +export const JobStartEventItem: React.FC = ({ event }) => { + const titleId = titleIdMap[event.eventType]; const { descriptionId, icon, streamsCount } = useMemo<{ descriptionId: string; icon: IconType; streamsCount: number | undefined; }>(() => { - if (jobStartEvent.eventType === "CLEAR_STARTED") { + if (event.eventType === "CLEAR_STARTED") { return { descriptionId: "connection.timeline.clear_started.description", icon: "cross", - streamsCount: jobStartEvent.summary.streams?.length ?? 0, + streamsCount: event.summary.streams?.length ?? 0, }; - } else if (jobStartEvent.eventType === "REFRESH_STARTED") { + } else if (event.eventType === "REFRESH_STARTED") { return { descriptionId: "connection.timeline.refresh_started.description", icon: "rotate", - streamsCount: jobStartEvent.summary.streams?.length ?? 0, + streamsCount: event.summary.streams?.length ?? 
0, }; } return { descriptionId: "connection.timeline.sync_started.description", icon: "sync", streamsCount: undefined }; - }, [jobStartEvent.eventType, jobStartEvent.summary.streams]); + }, [event.eventType, event.summary.streams]); return ( @@ -54,14 +54,14 @@ export const JobStartEventItem: React.FC = ({ jobStartEv , + user: , ...(streamsCount !== undefined && { value: streamsCount }), }} /> - + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/MappingEventItem.stories.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/MappingEventItem.stories.tsx new file mode 100644 index 00000000000..74d47eccd02 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/MappingEventItem.stories.tsx @@ -0,0 +1,75 @@ +import { StoryObj } from "@storybook/react"; + +import { MappingEventItem } from "./MappingEventItem"; + +export default { + title: "connection-timeline/MappingEventItem", + component: MappingEventItem, +} as StoryObj; + +const baseEvent = { + id: "fc62442f-1cc1-4a57-a385-11ca7507c649", + connectionId: "a90ab3d6-b5cb-4e43-8c97-5a4ab8f2f7d9", + user: { + id: "00000000-0000-0000-0000-000000000000", + email: "volodymyr.s.petrov@globallogic.com", + name: "Volodymyr Petrov", + }, + createdAt: 1732114841, +}; + +export const CreateMapping: StoryObj = { + args: { + event: { + ...baseEvent, + eventType: "MAPPING_CREATE", + summary: { + streamName: "users", + streamNamespace: undefined, + mapperType: "field-renaming", + }, + }, + }, +}; + +export const CreateMappingWithNamespace: StoryObj = { + args: { + event: { + ...baseEvent, + eventType: "MAPPING_CREATE", + summary: { + streamName: "users", + streamNamespace: "public", + mapperType: "field-renaming", + }, + }, + }, +}; + +export const UpdateMapping: StoryObj = { + args: { + event: { + ...baseEvent, + eventType: "MAPPING_UPDATE", + summary: { + streamName: "pokemon", + streamNamespace: undefined, + mapperType: "hashing", + }, + }, + }, +}; + +export const DeleteMapping: StoryObj = { + args: { + event: { + ...baseEvent, + eventType: "MAPPING_DELETE", + summary: { + streamName: "pokemon", + streamNamespace: undefined, + mapperType: "encryption", + }, + }, + }, +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/MappingEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/MappingEventItem.tsx new file mode 100644 index 00000000000..be45e5a55e8 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/MappingEventItem.tsx @@ -0,0 +1,62 @@ +import { FormattedMessage } from "react-intl"; +import { InferType } from "yup"; + +import { FlexContainer } from "components/ui/Flex"; +import { Text } from "components/ui/Text"; + +import { StreamMapperType } from "core/api/types/AirbyteClient"; + +import { TimelineEventUser } from "./TimelineEventUser"; +import { ConnectionTimelineEventActions } from "../ConnectionTimelineEventActions"; +import { ConnectionTimelineEventIcon } from "../ConnectionTimelineEventIcon"; +import { ConnectionTimelineEventItem } from "../ConnectionTimelineEventItem"; +import { ConnectionTimelineEventSummary } from "../ConnectionTimelineEventSummary"; +import { mappingEventSchema } from "../types"; + +// TODO: import from titleIdMap in utils.tsx once it's added +// issue_link: https://github.com/airbytehq/airbyte-internal-issues/issues/10947 +export const mappingTitleIdMap: Record = { + MAPPING_CREATE: "connection.timeline.mapping_create", + 
MAPPING_UPDATE: "connection.timeline.mapping_update", + MAPPING_DELETE: "connection.timeline.mapping_delete", +}; + +const mapperTypeToMessageIdMap: Record = { + [StreamMapperType.hashing]: "hashing", + [StreamMapperType["field-renaming"]]: "field_renaming", + [StreamMapperType["row-filtering"]]: "row_filtering", + [StreamMapperType.encryption]: "encryption", +}; + +interface MappingEventItemProps { + event: InferType; +} + +export const MappingEventItem: React.FC = ({ event }) => { + const messageId = mappingTitleIdMap[event.eventType]; + const mapperTypeId = mapperTypeToMessageIdMap[event.summary.mapperType]; + + return ( + + + + + + + + + , + stream: event.summary.streamName, + namespace: event.summary.streamNamespace, + }} + /> + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/RefreshEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/RefreshEventItem.tsx index a8c339f10c0..71b37b8dd45 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/RefreshEventItem.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/RefreshEventItem.tsx @@ -16,12 +16,12 @@ import { refreshEventSchema } from "../types"; import { getStatusByEventType, getStatusIcon, titleIdMap } from "../utils"; interface RefreshEventItemProps { - refreshEvent: InferType; + event: InferType; } -export const RefreshEventItem: React.FC = ({ refreshEvent }) => { - const titleId = titleIdMap[refreshEvent.eventType]; - const jobStatus = getStatusByEventType(refreshEvent.eventType); - const streamsToList = refreshEvent.summary.streams.map((stream) => stream.name); +export const RefreshEventItem: React.FC = ({ event }) => { + const titleId = titleIdMap[event.eventType]; + const jobStatus = getStatusByEventType(event.eventType); + const streamsToList = event.summary.streams.map((stream) => stream.name); return ( @@ -33,19 +33,15 @@ export const RefreshEventItem: React.FC = ({ refreshEvent - {jobStatus === "cancelled" && !!refreshEvent.user && ( - + {jobStatus === "cancelled" && !!event.user && ( + )} - + {streamsToList.length > 0 && } - + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/RunningJobItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/RunningJobItem.tsx index c3a4efa96a1..a751735bf74 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/RunningJobItem.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/RunningJobItem.tsx @@ -6,40 +6,36 @@ import { RefreshRunningItem } from "./RefreshRunningItem"; import { SyncRunningItem } from "./SyncRunningItem"; import { jobRunningSchema } from "../types"; -export const RunningJobItem: React.FC<{ jobRunningItem: InferType }> = React.memo( - ({ jobRunningItem }) => { - if (!jobRunningItem || !jobRunningItem.createdAt || !jobRunningItem.summary) { - return null; - } +interface RunningJobItemProps { + event: InferType; +} + +export const RunningJobItem: React.FC = React.memo(({ event }) => { + if (!event || !event.createdAt || !event.summary) { + return null; + } - const getStreams = (types: string[]) => - jobRunningItem.summary.streams - .filter((stream) => types.includes(stream.configType)) - .map((stream) => stream.streamName); + const getStreams = (types: string[]) => + event.summary.streams.filter((stream) => types.includes(stream.configType)).map((stream) => stream.streamName); - switch 
(jobRunningItem.summary.configType) { - case "sync": - return ; - case "refresh": - return ( - - ); - case "clear": - case "reset_connection": - return ( - - ); - default: - return null; - } + switch (event.summary.configType) { + case "sync": + return ; + case "refresh": + return ( + + ); + case "clear": + case "reset_connection": + return ( + + ); + default: + return null; } -); +}); RunningJobItem.displayName = "RunningJobItem"; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SourceConnectorUpdateEventItem.stories.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SourceConnectorUpdateEventItem.stories.tsx new file mode 100644 index 00000000000..e7466ca076d --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SourceConnectorUpdateEventItem.stories.tsx @@ -0,0 +1,96 @@ +import { StoryObj } from "@storybook/react"; + +import { ConnectorIds } from "area/connector/utils"; +import { ConnectionEventType } from "core/api/types/AirbyteClient"; + +import { SourceConnectorUpdateEventItem } from "./SourceConnectorUpdateEventItem"; + +export default { + title: "connection-timeline/SourceConnectorUpdateEventItem", + component: SourceConnectorUpdateEventItem, +} as StoryObj; + +const baseEvent = { + id: "fc62442f-1cc1-4a57-a385-11ca7507c649", + connectionId: "a90ab3d6-b5cb-4e43-8c97-5a4ab8f2f7d9", + eventType: ConnectionEventType.CONNECTOR_UPDATE, + summary: { + name: "PokeAPI", + sourceDefinitionId: ConnectorIds.Sources.PokeApi, + }, + user: { + id: "00000000-0000-0000-0000-000000000000", + email: "volodymyr.s.petrov@globallogic.com", + name: "Volodymyr Petrov", + }, + createdAt: 1732114841, +}; +export const UpgradedVersionByUser: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "2.1.0", + oldDockerImageTag: "2.0.5", + changeReason: "USER", + }, + }, + }, +}; + +export const UpgradedVersionBySystem: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "2.1.0", + oldDockerImageTag: "2.0.5", + changeReason: "SYSTEM", + }, + }, + }, +}; + +export const DowngradedVersionByUser: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "1.0.1", + oldDockerImageTag: "3.0.0", + changeReason: "USER", + }, + }, + }, +}; + +export const DowngradedVersionBySystem: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "1.0.1", + oldDockerImageTag: "3.0.0", + changeReason: "SYSTEM", + }, + }, + }, +}; + +export const UpdatedVersionByUser: StoryObj = { + args: { + event: { + ...baseEvent, + summary: { + ...baseEvent.summary, + newDockerImageTag: "dev", + oldDockerImageTag: "3.0.1", + changeReason: "USER", + }, + }, + }, +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SourceConnectorUpdateEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SourceConnectorUpdateEventItem.tsx new file mode 100644 index 00000000000..b8d92ed6b70 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SourceConnectorUpdateEventItem.tsx @@ -0,0 +1,52 @@ +import { FormattedMessage } from "react-intl"; +import { InferType } from "yup"; + +import { FlexContainer } from "components/ui/Flex"; +import { Text } from "components/ui/Text"; + +import { TimelineEventUser } from "./TimelineEventUser"; +import { 
ConnectionTimelineEventActions } from "../ConnectionTimelineEventActions"; +import { ConnectionTimelineEventIcon } from "../ConnectionTimelineEventIcon"; +import { ConnectionTimelineEventItem } from "../ConnectionTimelineEventItem"; +import { ConnectionTimelineEventSummary } from "../ConnectionTimelineEventSummary"; +import { sourceConnectorUpdateEventSchema } from "../types"; +import { isSemanticVersionTags, isVersionUpgraded } from "../utils"; + +interface ConnectionEnabledEventItemProps { + event: InferType; +} + +export const SourceConnectorUpdateEventItem: React.FC = ({ event }) => { + const description = "connection.timeline.connector_update.source.version.description.reason"; + + const messageId = isSemanticVersionTags(event.summary.newDockerImageTag, event.summary.oldDockerImageTag) + ? isVersionUpgraded(event.summary.newDockerImageTag, event.summary.oldDockerImageTag) + ? `${description}.upgraded.${event.summary.changeReason}` + : `${description}.downgraded.${event.summary.changeReason}` + : `${description}.updated.${event.summary.changeReason}`; + + return ( + + + + + + + + + , + source: event.summary.name, + from: event.summary.oldDockerImageTag, + to: event.summary.newDockerImageTag, + }} + /> + + + + + + ); +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SyncEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SyncEventItem.tsx index 83638bb89d9..237569292d4 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SyncEventItem.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SyncEventItem.tsx @@ -15,13 +15,13 @@ import { syncEventSchema } from "../types"; import { getStatusByEventType, getStatusIcon, titleIdMap } from "../utils"; interface SyncEventProps { - syncEvent: InferType; + event: InferType; } -export const SyncEventItem: React.FC = ({ syncEvent }) => { - const titleId = titleIdMap[syncEvent.eventType]; +export const SyncEventItem: React.FC = ({ event }) => { + const titleId = titleIdMap[event.eventType]; - const jobStatus = getStatusByEventType(syncEvent.eventType); + const jobStatus = getStatusByEventType(event.eventType); return ( @@ -32,18 +32,12 @@ export const SyncEventItem: React.FC = ({ syncEvent }) => { - {jobStatus === "cancelled" && !!syncEvent.user && ( - - )} - + {jobStatus === "cancelled" && !!event.user && } + - + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SyncFailEventItem.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SyncFailEventItem.tsx index d5b5cd8ab6d..e20a4bd15b3 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SyncFailEventItem.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/SyncFailEventItem.tsx @@ -18,17 +18,17 @@ import { syncFailEventSchema } from "../types"; import { getStatusByEventType, getStatusIcon, titleIdMap } from "../utils"; interface SyncFailEventItemProps { - syncEvent: InferType; + event: InferType; } -export const SyncFailEventItem: React.FC = ({ syncEvent }) => { +export const SyncFailEventItem: React.FC = ({ event }) => { const [showExtendedStats] = useLocalStorage("airbyte_extended-attempts-stats", false); const { formatMessage } = useIntl(); - const titleId = titleIdMap[syncEvent.eventType]; + const titleId = titleIdMap[event.eventType]; - const failureUiDetails = failureUiDetailsFromReason(syncEvent.summary.failureReason, formatMessage); - const 
jobStatus = getStatusByEventType(syncEvent.eventType); + const failureUiDetails = failureUiDetailsFromReason(event.summary.failureReason, formatMessage); + const jobStatus = getStatusByEventType(event.eventType); return ( @@ -37,7 +37,7 @@ export const SyncFailEventItem: React.FC = ({ syncEvent - + {failureUiDetails && ( @@ -45,15 +45,11 @@ export const SyncFailEventItem: React.FC = ({ syncEvent )} {!failureUiDetails && showExtendedStats && ( - + )} - + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/types.ts b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/types.ts index cd8f2933177..656fd02dc49 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/types.ts +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/types.ts @@ -13,6 +13,7 @@ import { NamespaceDefinitionType, NonBreakingChangesPreference, StreamAttributeTransformTransformType, + StreamMapperType, StreamTransformTransformType, } from "core/api/types/AirbyteClient"; @@ -29,18 +30,26 @@ import { */ const connectionAutoDisabledReasons = [ - "ONLY_FAILED_JOBS_RECENTLY", - "TOO_MANY_CONSECUTIVE_FAILED_JOBS_IN_A_ROW", + "TOO_MANY_FAILED_JOBS_WITH_NO_RECENT_SUCCESS", "SCHEMA_CHANGES_ARE_BREAKING", "DISABLE_CONNECTION_IF_ANY_SCHEMA_CHANGES", "INVALID_CREDIT_BALANCE", "CONNECTOR_NOT_SUPPORTED", "WORKSPACE_IS_DELINQUENT", + "INVOICE_MARKED_UNCOLLECTIBLE", + "INVALID_PAYMENT_METHOD", // this is from `ConnectionAutoUpdatedReason` but is also stamped onto the disabledReason field "SCHEMA_CHANGE_AUTO_PROPAGATE", + + // these two are no longer written for new events, but can exist in existing timelines. + // can be removed once all such events are expired/removed + "ONLY_FAILED_JOBS_RECENTLY", + "TOO_MANY_CONSECUTIVE_FAILED_JOBS_IN_A_ROW", ]; +const connectorChangeReasons = ["SYSTEM", "USER"]; + // property-specific schemas /** * @typedef {import("core/api/types/AirbyteClient").StreamDescriptor} @@ -130,6 +139,22 @@ const catalogDiffSchema = yup.object({ transforms: yup.array().of(streamTransformsSchema).required(), }); +const sourceDefinitionUpdateSchema = yup.object({ + name: yup.string().required(), + sourceDefinitionId: yup.string().required(), + newDockerImageTag: yup.string().required(), + oldDockerImageTag: yup.string().required(), + changeReason: yup.string().oneOf(connectorChangeReasons).required(), +}); + +const destinationDefinitionUpdateSchema = yup.object({ + name: yup.string().required(), + destinationDefinitionId: yup.string().required(), + newDockerImageTag: yup.string().required(), + oldDockerImageTag: yup.string().required(), + changeReason: yup.string().oneOf(connectorChangeReasons).required(), +}); + export type TimelineFailureReason = Omit; export const jobFailureReasonSchema = yup.object({ @@ -272,6 +297,15 @@ export const schemaUpdateSummarySchema = yup.object({ updateReason: yup.mixed().oneOf(["SCHEMA_CHANGE_AUTO_PROPAGATE"]).optional(), }); +export const mappingEventSummarySchema = yup.object({ + streamName: yup.string().required(), + streamNamespace: yup.string().optional(), + mapperType: yup + .mixed() + .oneOf([...Object.values(StreamMapperType)]) + .required(), +}); + /** * @typedef {import("core/api/types/AirbyteClient").ConnectionEvent} */ @@ -361,6 +395,23 @@ export const schemaUpdateEventSchema = generalEventSchema.shape({ summary: schemaUpdateSummarySchema.required(), }); +export const sourceConnectorUpdateEventSchema = generalEventSchema.shape({ + eventType: yup.mixed().oneOf([ConnectionEventType.CONNECTOR_UPDATE]).required(), + 
summary: sourceDefinitionUpdateSchema.required(), +}); + +export const destinationConnectorUpdateEventSchema = generalEventSchema.shape({ + eventType: yup.mixed().oneOf([ConnectionEventType.CONNECTOR_UPDATE]).required(), + summary: destinationDefinitionUpdateSchema.required(), +}); + +export const mappingEventSchema = generalEventSchema.shape({ + // TODO: add mapping event types from AirbyteClient once they are defined + // eventType: yup.mixed().oneOf([ConnectionEventType.MAPPING_CREATE, ConnectionEventType.MAPPING_UPDATE, ConnectionEventType.MAPPING_DELETE]).required(), + eventType: yup.mixed().oneOf(["MAPPING_CREATE", "MAPPING_UPDATE", "MAPPING_DELETE"]).required(), + summary: mappingEventSummarySchema.required(), +}); + export interface ConnectionTimelineRunningEvent { id: string; eventType: string; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/utils.test.ts b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/utils.test.ts new file mode 100644 index 00000000000..42dcda485d2 --- /dev/null +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/utils.test.ts @@ -0,0 +1,33 @@ +import { isSemanticVersionTags, isVersionUpgraded } from "./utils"; + +describe(`'${isSemanticVersionTags.name}'`, () => { + it("should return true for valid semantic version tags", () => { + expect(isSemanticVersionTags("2.0.0", "1.0.0")).toBe(true); + expect(isSemanticVersionTags("3.0.0", "3.0.1")).toBe(true); + }); + + it("should return false for invalid semantic version tags", () => { + expect(isSemanticVersionTags("1", "2.0")).toBe(false); + expect(isSemanticVersionTags("1.0", "2.0.0")).toBe(false); + expect(isSemanticVersionTags("1.0.0", "dev")).toBe(false); + expect(isSemanticVersionTags("dev", "1.0.0")).toBe(false); + expect(isSemanticVersionTags("1.0.0", "1.0.0-rc1")).toBe(false); + }); +}); + +describe(`'${isVersionUpgraded.name}'`, () => { + it("should return true if newVersion is greater than oldVersion", () => { + expect(isVersionUpgraded("2.0", "1.0")).toBe(true); + expect(isVersionUpgraded("1.1", "1.0")).toBe(true); + expect(isVersionUpgraded("1.0.1", "1.0.0")).toBe(true); + expect(isVersionUpgraded("1.0.0.1", "1.0.0.0")).toBe(true); + }); + + it("should return false if oldVersion is less than or equal to newVersion", () => { + expect(isVersionUpgraded("1.0", "2.0")).toBe(false); + expect(isVersionUpgraded("1.0", "1.1")).toBe(false); + expect(isVersionUpgraded("1.0.0", "1.0.1")).toBe(false); + expect(isVersionUpgraded("1.0.0.0", "1.0.0.1")).toBe(false); + expect(isVersionUpgraded("1.0.0", "1.0.0")).toBe(false); + }); +}); diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/utils.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/utils.tsx index 61d7c88ee3f..b53f74e07fe 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/utils.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/utils.tsx @@ -34,6 +34,12 @@ export const titleIdMap: Record = { // todo [ConnectionEventType.CONNECTOR_UPDATE]: "", [ConnectionEventType.UNKNOWN]: "", + + // TODO: waiting for the backend to add these + // issue_link: https://github.com/airbytehq/airbyte-internal-issues/issues/10947 + // [ConnectionEventType.MAPPING_CREATE]: "connection.timeline.mapping_create", + // [ConnectionEventType.MAPPING_UPDATE]: "connection.timeline.mapping_update", + // [ConnectionEventType.MAPPING_DELETE]: "connection.timeline.mapping_delete", }; /** @@ -219,3 +225,24 @@ export const eventTypeFilterOptions = (filterValues: 
TimelineFilterValues) => { : []), ]; }; + +export const isSemanticVersionTags = (newTag: string, oldTag: string): boolean => + [newTag, oldTag].every((tag) => /^\d+\.\d+\.\d+$/.test(tag)); + +export const isVersionUpgraded = (newVersion: string, oldVersion: string): boolean => { + const parseVersion = (version: string) => version.split(".").map(Number); + const newParsedVersion = parseVersion(newVersion); + const oldParsedVersion = parseVersion(oldVersion); + + for (let i = 0; i < Math.max(newParsedVersion.length, oldParsedVersion.length); i++) { + const num1 = newParsedVersion[i] || 0; + const num2 = oldParsedVersion[i] || 0; + if (num1 > num2) { + return true; + } + if (num1 < num2) { + return false; + } + } + return false; +}; diff --git a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/ConnectionTransformationPage.tsx b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/ConnectionTransformationPage.tsx index 923981ed9f1..82753dc4589 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/ConnectionTransformationPage.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTransformationPage/ConnectionTransformationPage.tsx @@ -1,8 +1,8 @@ import React from "react"; import { PageContainer } from "components/PageContainer"; -import { ScrollableContainer } from "components/ScrollableContainer"; import { FlexContainer } from "components/ui/Flex"; +import { ScrollParent } from "components/ui/ScrollParent"; import { useTrackPage, PageTrackingCodes } from "core/services/analytics"; @@ -12,12 +12,12 @@ export const ConnectionTransformationPage: React.FC = () => { useTrackPage(PageTrackingCodes.CONNECTIONS_ITEM_TRANSFORMATION); return ( - + - + ); }; diff --git a/airbyte-webapp/src/pages/connections/ConnectionsRoutes.tsx b/airbyte-webapp/src/pages/connections/ConnectionsRoutes.tsx index 537ff52e905..e50c6d088bf 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionsRoutes.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionsRoutes.tsx @@ -15,6 +15,8 @@ const ConnectionPage = React.lazy(() => import("./ConnectionPage")); const ConnectionReplicationPage = React.lazy(() => import("./ConnectionReplicationPage")); const ConnectionSettingsPage = React.lazy(() => import("./ConnectionSettingsPage")); const ConnectionTransformationPage = React.lazy(() => import("./ConnectionTransformationPage")); +const ConnectionMappingsPage = React.lazy(() => import("./ConnectionMappingsPage")); + const AllConnectionsPage = React.lazy(() => import("./AllConnectionsPage")); const StreamStatusPage = React.lazy(() => import("./StreamStatusPage")); export const JobHistoryToTimelineRedirect = () => { @@ -70,6 +72,7 @@ export const ConnectionsRoutes: React.FC = () => { } /> } /> } /> + } /> } /> } /> } /> diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.module.scss b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.module.scss index cba5df0765e..fb902a65807 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.module.scss +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamStatusPage.module.scss @@ -2,9 +2,7 @@ .container { grid-area: connection-page-content; - overflow-y: auto; display: flex; gap: variables.$spacing-md; flex-direction: column; - padding: variables.$spacing-xl; } diff --git a/airbyte-webapp/src/pages/connectorBuilder/ConnectorBuilderListPage/ConnectorBuilderListPage.tsx 
b/airbyte-webapp/src/pages/connectorBuilder/ConnectorBuilderListPage/ConnectorBuilderListPage.tsx index 9720e3fb6ef..fbac4c1a01a 100644 --- a/airbyte-webapp/src/pages/connectorBuilder/ConnectorBuilderListPage/ConnectorBuilderListPage.tsx +++ b/airbyte-webapp/src/pages/connectorBuilder/ConnectorBuilderListPage/ConnectorBuilderListPage.tsx @@ -5,6 +5,7 @@ import { MainPageWithScroll } from "components"; import { ConnectorBuilderProjectTable } from "components/ConnectorBuilderProjectTable"; import { HeadTitle } from "components/HeadTitle"; import { Button } from "components/ui/Button"; +import { FlexContainer } from "components/ui/Flex"; import { Heading } from "components/ui/Heading"; import { PageHeader } from "components/ui/PageHeader"; @@ -43,7 +44,9 @@ export const ConnectorBuilderListPage: React.FC = () => { /> } > - + + + ) : ( diff --git a/airbyte-webapp/src/pages/destination/AllDestinationsPage/AllDestinationsPage.module.scss b/airbyte-webapp/src/pages/destination/AllDestinationsPage/AllDestinationsPage.module.scss index f62e341db72..341bdc5e00b 100644 --- a/airbyte-webapp/src/pages/destination/AllDestinationsPage/AllDestinationsPage.module.scss +++ b/airbyte-webapp/src/pages/destination/AllDestinationsPage/AllDestinationsPage.module.scss @@ -12,6 +12,7 @@ $filters-fixed-height: 108px; .pageBody { grid-area: page-content; overflow-y: auto; + padding-top: 0; } .card { diff --git a/airbyte-webapp/src/pages/destination/AllDestinationsPage/AllDestinationsPage.tsx b/airbyte-webapp/src/pages/destination/AllDestinationsPage/AllDestinationsPage.tsx index 7bb36c4ebba..f1c2c98c691 100644 --- a/airbyte-webapp/src/pages/destination/AllDestinationsPage/AllDestinationsPage.tsx +++ b/airbyte-webapp/src/pages/destination/AllDestinationsPage/AllDestinationsPage.tsx @@ -72,43 +72,38 @@ const AllDestinationsPageInner: React.FC<{ destinations: DestinationRead[] }> = } /> - - -
- - - - setFilterValue("search", value)} + +
+ + + + setFilterValue("search", value)} /> + + + + setFilterValue("status", value)} /> - - - setFilterValue("status", value)} - /> - - - -
-
- - - - } - /> -
-
-
+ + +
+
+ + + + } + /> +
+
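The connector-update event items above choose their message id by first checking whether both docker image tags are plain x.y.z semver strings and then comparing them segment by segment, via the isSemanticVersionTags and isVersionUpgraded helpers added to ConnectionTimelinePage/utils.tsx earlier in this diff. A minimal standalone sketch of that logic follows; changeKind is a hypothetical wrapper (not part of the diff) shown only to illustrate how the "upgraded" / "downgraded" / "updated" suffix gets picked.

// Standalone sketch of the helpers added to ConnectionTimelinePage/utils.tsx above.
// changeKind is hypothetical; the real event items inline this ternary.
const isSemanticVersionTags = (newTag: string, oldTag: string): boolean =>
  [newTag, oldTag].every((tag) => /^\d+\.\d+\.\d+$/.test(tag));

const isVersionUpgraded = (newVersion: string, oldVersion: string): boolean => {
  const parse = (version: string) => version.split(".").map(Number);
  const next = parse(newVersion);
  const prev = parse(oldVersion);
  for (let i = 0; i < Math.max(next.length, prev.length); i++) {
    const a = next[i] || 0;
    const b = prev[i] || 0;
    if (a !== b) {
      return a > b;
    }
  }
  return false; // equal versions are not an upgrade
};

const changeKind = (newTag: string, oldTag: string): "upgraded" | "downgraded" | "updated" =>
  isSemanticVersionTags(newTag, oldTag)
    ? isVersionUpgraded(newTag, oldTag)
      ? "upgraded"
      : "downgraded"
    : "updated";

changeKind("2.1.0", "2.0.5"); // "upgraded"
changeKind("1.0.1", "3.0.0"); // "downgraded"
changeKind("dev", "3.0.1"); // "updated" - non-semver tags fall back to the generic wording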
diff --git a/airbyte-webapp/src/pages/routePaths.tsx b/airbyte-webapp/src/pages/routePaths.tsx index 152b460ad58..f00f278b66e 100644 --- a/airbyte-webapp/src/pages/routePaths.tsx +++ b/airbyte-webapp/src/pages/routePaths.tsx @@ -35,6 +35,7 @@ export const enum ConnectionRoutePaths { Configure = "configure", ConfigureContinued = "continued", Timeline = "timeline", + Mappings = "mappings", } export enum SettingsRoutePaths { diff --git a/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.module.scss b/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.module.scss index f62e341db72..f908fce5316 100644 --- a/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.module.scss +++ b/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.module.scss @@ -11,7 +11,7 @@ $filters-fixed-height: 108px; .pageBody { grid-area: page-content; - overflow-y: auto; + padding-top: 0; } .card { diff --git a/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.tsx b/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.tsx index 71540aa1dfb..1adaa4de89f 100644 --- a/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.tsx +++ b/airbyte-webapp/src/pages/source/AllSourcesPage/AllSourcesPage.tsx @@ -64,43 +64,38 @@ const AllSourcesPageInner: React.FC<{ sources: SourceRead[] }> = ({ sources }) = } /> - - -
- - - - setFilterValue("search", value)} + +
+ + + + setFilterValue("search", value)} /> + + + + setFilterValue("status", value)} /> - - - setFilterValue("status", value)} - /> - - - -
-
- - - - } - /> -
-
-
+ + +
+
+ + + + } + /> +
+
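Several pages touched in this diff (the timeline event list, the transformation page, and SourceConnectionTable just below) stop threading a scrollElement prop through component trees and instead read the scroll container from React context via ScrollParentContext and the ScrollParent wrapper. The real component's props are not visible in these hunks, so the following is only an illustrative sketch of that pattern, assuming the wrapper publishes its DOM node through context and consumers read it with useContext.

import React, { createContext, useContext, useState } from "react";

// Illustrative names mirror the diff (ScrollParentContext, ScrollParent); the actual
// component in components/ui/ScrollParent may differ in props and styling.
export const ScrollParentContext = createContext<HTMLDivElement | null>(null);

export const ScrollParent: React.FC<React.PropsWithChildren<{ className?: string }>> = ({
  className,
  children,
}) => {
  // Keep the rendered element in state so consumers re-render once the ref resolves.
  const [node, setNode] = useState<HTMLDivElement | null>(null);
  return (
    <div ref={setNode} className={className} style={{ overflowY: "auto" }}>
      <ScrollParentContext.Provider value={node}>{children}</ScrollParentContext.Provider>
    </div>
  );
};

// A virtualized list (or any descendant) can then use the context value as its
// custom scroll parent instead of receiving scrollElement as a prop:
export const useCustomScrollParent = () => useContext(ScrollParentContext);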
diff --git a/airbyte-webapp/src/pages/source/SourceConnectionsPage/SourceConnectionTable.module.scss b/airbyte-webapp/src/pages/source/SourceConnectionsPage/SourceConnectionTable.module.scss new file mode 100644 index 00000000000..74e6df5b983 --- /dev/null +++ b/airbyte-webapp/src/pages/source/SourceConnectionsPage/SourceConnectionTable.module.scss @@ -0,0 +1,3 @@ +.container { + padding-top: 0; +} diff --git a/airbyte-webapp/src/pages/source/SourceConnectionsPage/SourceConnectionTable.tsx b/airbyte-webapp/src/pages/source/SourceConnectionsPage/SourceConnectionTable.tsx index b71c3dcd35c..c48e32e98fe 100644 --- a/airbyte-webapp/src/pages/source/SourceConnectionsPage/SourceConnectionTable.tsx +++ b/airbyte-webapp/src/pages/source/SourceConnectionsPage/SourceConnectionTable.tsx @@ -2,23 +2,22 @@ import React from "react"; import { ConnectionTable } from "components/EntityTable"; import { getConnectionTableData } from "components/EntityTable/utils"; -import { Box } from "components/ui/Box"; import { ScrollParent } from "components/ui/ScrollParent"; import { WebBackendConnectionListItem } from "core/api/types/AirbyteClient"; -interface IProps { +import styles from "./SourceConnectionTable.module.scss"; + +interface SourceConnectionTableProps { connections: WebBackendConnectionListItem[]; } -const SourceConnectionTable: React.FC = ({ connections }) => { +const SourceConnectionTable: React.FC = ({ connections }) => { const data = getConnectionTableData(connections, "source"); return ( - - - - + + ); }; diff --git a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx index 395620781d0..faacc76fb1f 100644 --- a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx +++ b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx @@ -4,6 +4,7 @@ import cloneDeep from "lodash/cloneDeep"; import isEqual from "lodash/isEqual"; import merge from "lodash/merge"; import toPath from "lodash/toPath"; +import { editor, Position } from "monaco-editor"; import React, { useCallback, useContext, useEffect, useMemo, useRef, useState } from "react"; import { useFormContext, UseFormReturn } from "react-hook-form"; import { useIntl } from "react-intl"; @@ -155,6 +156,13 @@ interface FormManagementStateContext { setScrollToField: (field: string | undefined) => void; stateKey: number; setStateKey: React.Dispatch>; + newUserInputContext: NewUserInputContext | undefined; + setNewUserInputContext: (context: NewUserInputContext | undefined) => void; +} + +interface NewUserInputContext { + model: editor.ITextModel; + position: Position; } export const ConnectorBuilderFormStateContext = React.createContext(null); @@ -289,7 +297,9 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< const streams = useBuilderWatch("formValues.streams"); const streamNames = - mode === "ui" ? streams.map((stream) => stream.name) : resolvedManifest.streams.map((stream) => stream.name ?? ""); + mode === "ui" + ? streams.map((stream) => stream.name) + : resolvedManifest.streams?.map((stream) => stream.name ?? "") ?? 
[]; useEffect(() => { if (name !== currentProject.name) { @@ -326,7 +336,10 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< setValue("mode", "yaml"); } else { const confirmDiscard = (errorMessage: string) => { - if (isEqual(formValues, DEFAULT_BUILDER_FORM_VALUES) && jsonManifest.streams.length > 0) { + if ( + isEqual(formValues, DEFAULT_BUILDER_FORM_VALUES) && + (!jsonManifest.streams || jsonManifest.streams.length > 0) + ) { openNoUiValueModal(errorMessage); } else { openConfirmationModal({ @@ -417,7 +430,7 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< }; const view = getValues("view"); - if (typeof view === "number" && manifest.streams.length <= view) { + if (typeof view === "number" && manifest.streams && manifest.streams.length <= view) { // switch back to global view if the selected stream does not exist anymore setValue("view", "global"); } @@ -672,6 +685,9 @@ export function useInitializedBuilderProject() { * With this, we will only require testing streams that the user changes. */ function setInitialStreamHashes(persistedManifest: ConnectorManifest, resolvedManifest: ConnectorManifest) { + if (!persistedManifest.streams || !resolvedManifest.streams) { + return; + } if (persistedManifest.streams.length !== resolvedManifest.streams.length) { // this should never happen, since resolving a manifest should never affect the number of streams throw new Error("Persisted manifest streams length doesn't match resolved streams length"); @@ -787,7 +803,7 @@ export const ConnectorBuilderTestReadProvider: React.FC { + if (!testStream) { + return; + } + if (result.latest_config_update) { setValue("testingValues", result.latest_config_update); } @@ -855,6 +875,7 @@ export const ConnectorBuilderTestReadProvider: React.FC(undefined); const [stateKey, setStateKey] = useState(0); + const [newUserInputContext, setNewUserInputContext] = useState(undefined); const handleScrollToField = useCallback( (ref: React.RefObject, path: string) => { @@ -1033,8 +1055,10 @@ export const ConnectorBuilderFormManagementStateProvider: React.FC { ); }; -const BrandingBadge: React.FC<{ product: "enterprise" | "cloudForTeams" }> = ({ product }) => ( +export const BrandingBadge: React.FC<{ product: "enterprise" | "cloudForTeams" }> = ({ product }) => ( diff --git a/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx b/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx index c10da081714..1a222078227 100644 --- a/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx +++ b/airbyte-webapp/src/views/layout/SideBar/SideBar.tsx @@ -6,18 +6,12 @@ import { matchPath, useLocation } from "react-router-dom"; import { AdminWorkspaceWarning } from "components/ui/AdminWorkspaceWarning"; import { FlexContainer } from "components/ui/Flex"; import { Icon } from "components/ui/Icon"; -import { ExternalLink } from "components/ui/Link"; import { ThemeToggle } from "components/ui/ThemeToggle"; -import { Tooltip } from "components/ui/Tooltip"; import { WorkspacesPicker } from "components/workspace/WorkspacesPicker"; import type { WorkspaceFetcher } from "components/workspace/WorkspacesPickerList"; import { useAuthService } from "core/services/auth"; import { FeatureItem, IfFeatureEnabled } from "core/services/features"; -import { links } from "core/utils/links"; -import { useExperiment } from "hooks/services/Experiment"; -import { useShowBillingPageV2 } from "packages/cloud/area/billing/utils/useShowBillingPage"; -import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; import { 
ConnectorBuilderRoutePaths } from "pages/connectorBuilder/ConnectorBuilderRoutes"; import { RoutePaths } from "pages/routePaths"; @@ -37,43 +31,6 @@ const HIDDEN_SIDEBAR_PATHS = [ `${RoutePaths.Workspaces}/:workspaceId/${RoutePaths.ConnectorBuilder}/${ConnectorBuilderRoutePaths.Edit}`, ]; -const BillingPageLink: React.FC = () => { - const showBillingPageV2 = useShowBillingPageV2(); - const isBillingMigrationMaintenance = useExperiment("billing.migrationMaintenance"); - - if (showBillingPageV2) { - return null; - } - - return ( - } - to={CloudRoutes.Billing} - testId="creditsButton" - /> - } - disabled={!isBillingMigrationMaintenance} - > - ( - - {node} - - ), - }} - /> - - ); -}; - export const SideBar: React.FC> = ({ workspaceFetcher, bottomSlot, @@ -127,9 +84,6 @@ export const SideBar: React.FC> = ({ testId="builderLink" to={RoutePaths.ConnectorBuilder} /> - - - } icon="gear" diff --git a/airbyte-worker-models/build.gradle.kts b/airbyte-worker-models/build.gradle.kts index 18b6f11a77c..2689ad204e6 100644 --- a/airbyte-worker-models/build.gradle.kts +++ b/airbyte-worker-models/build.gradle.kts @@ -7,9 +7,6 @@ plugins { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - implementation(platform(libs.fasterxml)) implementation(libs.bundles.jackson) implementation(project(":oss:airbyte-commons")) @@ -30,3 +27,7 @@ jsonSchema2Pojo { includeConstructors = false includeSetters = true } + +tasks.named("compileKotlin") { + dependsOn(tasks.named("generateJsonSchema2Pojo")) +} diff --git a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/JobInput.java b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/JobInput.java deleted file mode 100644 index f7857612713..00000000000 --- a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/JobInput.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import io.airbyte.config.StandardSyncInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * Generated job input. - */ -@Data -@NoArgsConstructor -@AllArgsConstructor -public class JobInput { - - private JobRunConfig jobRunConfig; - private IntegrationLauncherConfig sourceLauncherConfig; - private IntegrationLauncherConfig destinationLauncherConfig; - private StandardSyncInput syncInput; - -} diff --git a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityInput.java b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityInput.java deleted file mode 100644 index aa20da0e360..00000000000 --- a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityInput.java +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * A class holding the input to the Temporal schema refresh activity. - */ -@Data -@NoArgsConstructor -@AllArgsConstructor -public class RefreshSchemaActivityInput { - - // The id of the source catalog associated with this connection. - private UUID sourceCatalogId; - // The id of the connection for which we're refreshing the schema. 
- private UUID connectionId; - // The workspace that contains the connection, used mostly for feature flagging. - private UUID workspaceId; - -} diff --git a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityOutput.java b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityOutput.java deleted file mode 100644 index 569639d9772..00000000000 --- a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/RefreshSchemaActivityOutput.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import io.airbyte.config.CatalogDiff; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * A class holding the output to the Temporal schema refresh activity. - */ -@Data -@NoArgsConstructor -@AllArgsConstructor -public class RefreshSchemaActivityOutput { - - private CatalogDiff appliedDiff; - -} diff --git a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/ReplicationActivityInput.java b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/ReplicationActivityInput.java deleted file mode 100644 index 00c7fdc2ce1..00000000000 --- a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/ReplicationActivityInput.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import com.fasterxml.jackson.annotation.JsonIgnoreProperties; -import com.fasterxml.jackson.databind.JsonNode; -import io.airbyte.config.ConnectionContext; -import io.airbyte.config.JobSyncConfig; -import io.airbyte.config.SyncResourceRequirements; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * A class holding the input to the Temporal replication activity. - */ -@Data -@NoArgsConstructor -@AllArgsConstructor -@JsonIgnoreProperties(ignoreUnknown = true) -public class ReplicationActivityInput { - - // Actor ID for the source used in the sync - this is used to update the actor configuration when - // requested. - private UUID sourceId; - // Actor ID for the destination used in the sync - this is used to update the actor configuration - // when requested. - private UUID destinationId; - // Source-connector specific blob. Must be a valid JSON string. - private JsonNode sourceConfiguration; - // Destination-connector specific blob. Must be a valid JSON string. - private JsonNode destinationConfiguration; - // The job info -- job id, attempt id -- for this sync. - private JobRunConfig jobRunConfig; - // Config related to the source launcher (rather than the source itself) e.g., whether it's a custom - // connector. - private IntegrationLauncherConfig sourceLauncherConfig; - // Config related to the destination launcher (rather than the destination itself) e.g., whether it - // supports DBT - // transformations. - private IntegrationLauncherConfig destinationLauncherConfig; - // Resource requirements to use for the sync. - private SyncResourceRequirements syncResourceRequirements; - // The id of the workspace associated with this sync. - private UUID workspaceId; - // The id of the connection associated with this sync. - private UUID connectionId; - // The task queue that replication will use. 
- private String taskQueue; - // Whether this 'sync' is performing a logical reset. - private Boolean isReset; - // The type of namespace definition - e.g. source, destination, or custom. - private JobSyncConfig.NamespaceDefinitionType namespaceDefinition; - private String namespaceFormat; - // Prefix that will be prepended to the name of each stream when it is written to the destination. - private String prefix; - // The results of schema refresh, including the applied diff which is used to determine which - // streams to backfill. - private RefreshSchemaActivityOutput schemaRefreshOutput; - // Replication context object containing relevant IDs - private ConnectionContext connectionContext; - private String signalInput; - -} diff --git a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/SidecarInput.java b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/SidecarInput.java deleted file mode 100644 index 15462432dad..00000000000 --- a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/SidecarInput.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.config.StandardDiscoverCatalogInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -@Data -@NoArgsConstructor -@AllArgsConstructor -public class SidecarInput { - - public enum OperationType { - CHECK, - DISCOVER, - SPEC, - } - - StandardCheckConnectionInput checkConnectionInput; - StandardDiscoverCatalogInput discoverCatalogInput; - String workloadId; - IntegrationLauncherConfig integrationLauncherConfig; - OperationType operationType; - String logPath; - -} diff --git a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/SyncJobCheckConnectionInputs.java b/airbyte-worker-models/src/main/java/io/airbyte/workers/models/SyncJobCheckConnectionInputs.java deleted file mode 100644 index 1803506ef34..00000000000 --- a/airbyte-worker-models/src/main/java/io/airbyte/workers/models/SyncJobCheckConnectionInputs.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.models; - -import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * GeneratedJobInput. 
- */ -@Data -@NoArgsConstructor -@AllArgsConstructor -public class SyncJobCheckConnectionInputs { - - private IntegrationLauncherConfig sourceLauncherConfig; - private IntegrationLauncherConfig destinationLauncherConfig; - private StandardCheckConnectionInput sourceCheckConnectionInput; - private StandardCheckConnectionInput destinationCheckConnectionInput; - -} diff --git a/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/JobInput.kt b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/JobInput.kt new file mode 100644 index 00000000000..f965164839d --- /dev/null +++ b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/JobInput.kt @@ -0,0 +1,12 @@ +package io.airbyte.workers.models + +import io.airbyte.config.StandardSyncInput +import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig + +data class JobInput( + val jobRunConfig: JobRunConfig? = null, + val sourceLauncherConfig: IntegrationLauncherConfig? = null, + val destinationLauncherConfig: IntegrationLauncherConfig? = null, + val syncInput: StandardSyncInput? = null, +) diff --git a/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/RefreshSchemaActivityInput.kt b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/RefreshSchemaActivityInput.kt new file mode 100644 index 00000000000..504661a89da --- /dev/null +++ b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/RefreshSchemaActivityInput.kt @@ -0,0 +1,15 @@ +package io.airbyte.workers.models + +import java.util.UUID + +/** + * A class holding the input to the Temporal schema refresh activity. + */ +data class RefreshSchemaActivityInput( + // The id of the source catalog associated with this connection. + val sourceCatalogId: UUID? = null, +// The id of the connection for which we're refreshing the schema. + val connectionId: UUID? = null, +// The workspace that contains the connection, used mostly for feature flagging. + val workspaceId: UUID? = null, +) diff --git a/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/RefreshSchemaActivityOutput.kt b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/RefreshSchemaActivityOutput.kt new file mode 100644 index 00000000000..1bf7e11ab00 --- /dev/null +++ b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/RefreshSchemaActivityOutput.kt @@ -0,0 +1,10 @@ +package io.airbyte.workers.models + +import io.airbyte.config.CatalogDiff + +/** + * A class holding the output to the Temporal schema refresh activity. + */ +data class RefreshSchemaActivityOutput( + val appliedDiff: CatalogDiff? = null, +) diff --git a/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/ReplicationActivityInput.kt b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/ReplicationActivityInput.kt new file mode 100644 index 00000000000..8a99906cfb1 --- /dev/null +++ b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/ReplicationActivityInput.kt @@ -0,0 +1,58 @@ +package io.airbyte.workers.models + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties +import com.fasterxml.jackson.databind.JsonNode +import io.airbyte.config.ConnectionContext +import io.airbyte.config.JobSyncConfig +import io.airbyte.config.SyncResourceRequirements +import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig +import java.util.UUID + +/** + * A class holding the input to the Temporal replication activity. 
+ */ +@JsonIgnoreProperties(ignoreUnknown = true) +data class ReplicationActivityInput( + // Actor ID for the source used in the sync - this is used to update the actor configuration when + // requested. + val sourceId: UUID? = null, +// Actor ID for the destination used in the sync - this is used to update the actor configuration +// when requested. + val destinationId: UUID? = null, +// Source-connector specific blob. Must be a valid JSON string. + var sourceConfiguration: JsonNode? = null, +// Destination-connector specific blob. Must be a valid JSON string. + var destinationConfiguration: JsonNode? = null, +// The job info -- job id, attempt id -- for this sync. + val jobRunConfig: JobRunConfig? = null, +// Config related to the source launcher (rather than the source itself) e.g., whether it's a custom +// connector. + val sourceLauncherConfig: IntegrationLauncherConfig? = null, +// Config related to the destination launcher (rather than the destination itself) e.g., whether it +// supports DBT +// transformations. + val destinationLauncherConfig: IntegrationLauncherConfig? = null, +// Resource requirements to use for the sync. + val syncResourceRequirements: SyncResourceRequirements? = null, +// The id of the workspace associated with this sync. + val workspaceId: UUID? = null, +// The id of the connection associated with this sync. + val connectionId: UUID? = null, +// The task queue that replication will use. + val taskQueue: String? = null, +// Whether this 'sync' is performing a logical reset. + var isReset: Boolean? = null, +// The type of namespace definition - e.g. source, destination, or custom. + val namespaceDefinition: JobSyncConfig.NamespaceDefinitionType? = null, + val namespaceFormat: String? = null, +// Prefix that will be prepended to the name of each stream when it is written to the destination. + val prefix: String? = null, +// The results of schema refresh, including the applied diff which is used to determine which +// streams to backfill. + var schemaRefreshOutput: RefreshSchemaActivityOutput? = null, +// Replication context object containing relevant IDs + val connectionContext: ConnectionContext? = null, + val signalInput: String? = null, + val networkSecurityTokens: List? 
= null, +) diff --git a/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/SidecarInput.kt b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/SidecarInput.kt new file mode 100644 index 00000000000..c8b20b2b7ec --- /dev/null +++ b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/SidecarInput.kt @@ -0,0 +1,20 @@ +package io.airbyte.workers.models + +import io.airbyte.config.StandardCheckConnectionInput +import io.airbyte.config.StandardDiscoverCatalogInput +import io.airbyte.persistence.job.models.IntegrationLauncherConfig + +data class SidecarInput( + val checkConnectionInput: StandardCheckConnectionInput?, + val discoverCatalogInput: StandardDiscoverCatalogInput?, + val workloadId: String, + val integrationLauncherConfig: IntegrationLauncherConfig, + val operationType: OperationType, + val logPath: String, +) { + enum class OperationType { + CHECK, + DISCOVER, + SPEC, + } +} diff --git a/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/SyncJobCheckConnectionInputs.kt b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/SyncJobCheckConnectionInputs.kt new file mode 100644 index 00000000000..a114e9dffa5 --- /dev/null +++ b/airbyte-worker-models/src/main/kotlin/io/airbyte/workers/models/SyncJobCheckConnectionInputs.kt @@ -0,0 +1,14 @@ +package io.airbyte.workers.models + +import io.airbyte.config.StandardCheckConnectionInput +import io.airbyte.persistence.job.models.IntegrationLauncherConfig + +/** + * GeneratedJobInput. + */ +data class SyncJobCheckConnectionInputs( + val sourceLauncherConfig: IntegrationLauncherConfig? = null, + val destinationLauncherConfig: IntegrationLauncherConfig? = null, + val sourceCheckConnectionInput: StandardCheckConnectionInput? = null, + val destinationCheckConnectionInput: StandardCheckConnectionInput? 
= null, +) diff --git a/airbyte-worker-models/src/main/resources/workers_models/ReplicationInput.yaml b/airbyte-worker-models/src/main/resources/workers_models/ReplicationInput.yaml index 04a49e5a066..3d524610673 100644 --- a/airbyte-worker-models/src/main/resources/workers_models/ReplicationInput.yaml +++ b/airbyte-worker-models/src/main/resources/workers_models/ReplicationInput.yaml @@ -91,3 +91,8 @@ properties: default: false connectionContext: existingJavaType: io.airbyte.config.ConnectionContext + networkSecurityTokens: + description: Workload launcher will use these to grant pods additional network access + type: array + items: + type: string diff --git a/airbyte-workers/build.gradle.kts b/airbyte-workers/build.gradle.kts index 4f00d80242a..50f759e0bb9 100644 --- a/airbyte-workers/build.gradle.kts +++ b/airbyte-workers/build.gradle.kts @@ -34,8 +34,6 @@ configurations.all { } dependencies { - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) @@ -103,8 +101,6 @@ dependencies { runtimeOnly(libs.javax.databind) runtimeOnly(libs.bundles.logback) - testCompileOnly(libs.lombok) - testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/ApplicationInitializer.java b/airbyte-workers/src/main/java/io/airbyte/workers/ApplicationInitializer.java index 2fcf4eacf52..cb2ced46fca 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/ApplicationInitializer.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/ApplicationInitializer.java @@ -38,6 +38,7 @@ import jakarta.inject.Inject; import jakarta.inject.Named; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.util.Arrays; import java.util.List; import java.util.Optional; @@ -47,16 +48,18 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Performs any required initialization logic on application context start. */ @Singleton @Requires(notEnv = {Environment.TEST}) -@Slf4j public class ApplicationInitializer implements ApplicationEventListener { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + @Inject @Named("checkConnectionActivities") private Optional> checkConnectionActivities; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/config/ApplicationBeanFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/config/ApplicationBeanFactory.java index d6569182638..5c93c91d21d 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/config/ApplicationBeanFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/config/ApplicationBeanFactory.java @@ -21,13 +21,11 @@ import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.function.Supplier; -import lombok.extern.slf4j.Slf4j; /** * Micronaut bean factory for general singletons. 
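Note on the logging change that recurs throughout this patch: Lombok's @Slf4j annotation is dropped in favor of an explicitly declared SLF4J logger resolved through MethodHandles. A minimal sketch of the pattern (class name and log message are illustrative only):

    import java.lang.invoke.MethodHandles;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleService {

      // Effectively the same logger @Slf4j used to generate: a static SLF4J logger named after the
      // enclosing class. MethodHandles.lookup().lookupClass() resolves ExampleService without
      // repeating the class name, so the declaration can be copied between classes unchanged.
      private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

      public void doWork() {
        log.debug("doing work");
      }

    }
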
*/ @Factory -@Slf4j @SuppressWarnings("PMD.AvoidDuplicateLiterals") public class ApplicationBeanFactory { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/config/CloudStorageBeanFactory.java b/airbyte-workers/src/main/java/io/airbyte/workers/config/CloudStorageBeanFactory.java index a0d1738eddb..d21f167a461 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/config/CloudStorageBeanFactory.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/config/CloudStorageBeanFactory.java @@ -30,6 +30,12 @@ public StorageClient logStorageClient(final StorageClientFactory factory) { return factory.create(DocumentType.LOGS); } + @Singleton + @Named("auditLoggingDocumentStore") + public StorageClient auditLoggingStorageClient(final StorageClientFactory factory) { + return factory.create(DocumentType.AUDIT_LOGS); + } + @Singleton @Named("stateDocumentStore") public StorageClient stateStorageClient(final StorageClientFactory factory) { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/helpers/ProgressChecker.java b/airbyte-workers/src/main/java/io/airbyte/workers/helpers/ProgressChecker.java index 20ab9471492..b46e4a6b02d 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/helpers/ProgressChecker.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/helpers/ProgressChecker.java @@ -12,13 +12,11 @@ import jakarta.inject.Singleton; import java.io.IOException; import java.util.Optional; -import lombok.extern.slf4j.Slf4j; import org.openapitools.client.infrastructure.ClientException; /** * Composes all the business and request logic for checking progress of a run. */ -@Slf4j @Singleton public class ProgressChecker { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/helpers/RetryStateClient.java b/airbyte-workers/src/main/java/io/airbyte/workers/helpers/RetryStateClient.java index b1aa638f92c..b90506174f3 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/helpers/RetryStateClient.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/helpers/RetryStateClient.java @@ -28,21 +28,23 @@ import io.micronaut.http.HttpStatus; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.time.Duration; import java.util.List; import java.util.Optional; import java.util.UUID; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; import org.openapitools.client.infrastructure.ClientException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Business and request logic for retrieving and persisting retry state data. */ -@Slf4j @Singleton public class RetryStateClient { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + final AirbyteApiClient airbyteApiClient; final FeatureFlagClient featureFlagClient; final Integer successiveCompleteFailureLimit; @@ -83,23 +85,28 @@ public RetryStateClient(final AirbyteApiClient airbyteApiClient, * @throws RetryableException — Delegates to Temporal to retry for now (retryWithJitter swallowing * 404's is problematic). 
*/ - @SneakyThrows public RetryManager hydrateRetryState(final Long jobId, final UUID workspaceId) throws RetryableException { - final var organizationId = fetchOrganizationId(workspaceId); + try { + final var organizationId = fetchOrganizationId(workspaceId); - final var manager = initializeBuilder(workspaceId, organizationId); + final var manager = initializeBuilder(workspaceId, organizationId); - final var state = Optional.ofNullable(jobId).flatMap(this::fetchRetryState); + final var state = Optional.ofNullable(jobId).flatMap(this::fetchRetryState); - // if there is retry state we hydrate - // otherwise we will build with default 0 values - state.ifPresent(s -> manager - .totalCompleteFailures(s.getTotalCompleteFailures()) - .totalPartialFailures(s.getTotalPartialFailures()) - .successiveCompleteFailures(s.getSuccessiveCompleteFailures()) - .successivePartialFailures(s.getSuccessivePartialFailures())); + // if there is retry state we hydrate + // otherwise we will build with default 0 values + state.ifPresent(s -> manager + .totalCompleteFailures(s.getTotalCompleteFailures()) + .totalPartialFailures(s.getTotalPartialFailures()) + .successiveCompleteFailures(s.getSuccessiveCompleteFailures()) + .successivePartialFailures(s.getSuccessivePartialFailures())); - return manager.build(); + return manager.build(); + } catch (final RetryableException e) { + throw e; + } catch (final Exception e) { + throw new RuntimeException(e); + } } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/helpers/ScheduleJitterHelper.java b/airbyte-workers/src/main/java/io/airbyte/workers/helpers/ScheduleJitterHelper.java index 353b26ab61d..0d3bd92b8ee 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/helpers/ScheduleJitterHelper.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/helpers/ScheduleJitterHelper.java @@ -7,17 +7,20 @@ import io.airbyte.api.client.model.generated.ConnectionScheduleType; import io.micronaut.context.annotation.Value; import jakarta.inject.Singleton; +import java.lang.invoke.MethodHandles; import java.time.Duration; import java.util.Random; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Helper to compute and apply random jitter to scheduled connections. */ @Singleton -@Slf4j public class ScheduleJitterHelper { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final int noJitterCutoffMinutes; private final int highFrequencyThresholdMinutes; private final int highFrequencyJitterAmountMinutes; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java index 806a4c7dc06..d69563f0195 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java @@ -23,13 +23,11 @@ import jakarta.inject.Singleton; import java.time.Duration; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; /** * Check connection activity temporal implementation for the control plane. 
*/ @Singleton -@Slf4j @SuppressWarnings("PMD.ExceptionAsFlowControl") public class CheckConnectionActivityImpl implements CheckConnectionActivity { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java index 11836857a35..804da9719a1 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java @@ -21,13 +21,11 @@ import io.temporal.activity.ActivityExecutionContext; import jakarta.inject.Singleton; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; /** * DiscoverCatalogActivityImpl. */ @Singleton -@Slf4j @SuppressWarnings("PMD.ExceptionAsFlowControl") public class DiscoverCatalogActivityImpl implements DiscoverCatalogActivity { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityImpl.java index 9d56c8a0a1b..642180bc7f7 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityImpl.java @@ -47,7 +47,10 @@ public void reportFailure() { public PostprocessCatalogOutput postprocess(final PostprocessCatalogInput input) { try { Objects.requireNonNull(input.getConnectionId()); - Objects.requireNonNull(input.getCatalogId()); + + if (input.getCatalogId() == null) { + return PostprocessCatalogOutput.Companion.success(null); + } final var reqBody = new PostprocessDiscoveredCatalogRequestBody( input.getCatalogId(), diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java index bfa46d8e5db..0aa2ab295d0 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowImpl.java @@ -95,6 +95,7 @@ import io.temporal.workflow.ChildWorkflowOptions; import io.temporal.workflow.Workflow; import jakarta.annotation.Nullable; +import java.lang.invoke.MethodHandles; import java.time.Duration; import java.util.HashSet; import java.util.Map; @@ -104,15 +105,17 @@ import java.util.UUID; import java.util.function.Consumer; import java.util.function.Function; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * ConnectionManagerWorkflowImpl. 
*/ -@Slf4j @SuppressWarnings("PMD.AvoidDuplicateLiterals") public class ConnectionManagerWorkflowImpl implements ConnectionManagerWorkflow { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String GENERATE_CHECK_INPUT_TAG = "generate_check_input"; private static final int GENERATE_CHECK_INPUT_CURRENT_VERSION = 1; private static final String CHECK_WORKSPACE_TOMBSTONE_TAG = "check_workspace_tombstone"; @@ -477,7 +480,7 @@ private void recordProgressMetric(final ConnectionUpdaterInput input, final Fail private boolean isWithinRetryLimit(final int attemptNumber) { if (useAttemptCountRetries()) { - final int maxAttempt = configFetchActivity.getMaxAttempt().getMaxAttempt(); + final int maxAttempt = configFetchActivity.getMaxAttempt().maxAttempt(); return maxAttempt > attemptNumber; } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionUpdaterWorkflowConfig.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionUpdaterWorkflowConfig.java deleted file mode 100644 index 310febda74d..00000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ConnectionUpdaterWorkflowConfig.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling; - -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * ConnectionUpdaterWorkflowConfig. - */ -@Data -@NoArgsConstructor -@AllArgsConstructor -public class ConnectionUpdaterWorkflowConfig { - - private boolean firstStart; - -} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ManualSyncOutput.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ManualSyncOutput.java deleted file mode 100644 index 6fdffa09161..00000000000 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/ManualSyncOutput.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.workers.temporal.scheduling; - -import lombok.Data; -import lombok.NoArgsConstructor; - -/** - * ManualSyncOutput. - */ -@Data -@NoArgsConstructor -public class ManualSyncOutput { - - private boolean submitted; - -} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/SyncCheckConnectionResult.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/SyncCheckConnectionResult.java index 7eeeaf41262..39d18963693 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/SyncCheckConnectionResult.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/SyncCheckConnectionResult.java @@ -12,15 +12,18 @@ import io.airbyte.persistence.job.models.JobRunConfig; import io.airbyte.workers.helper.FailureHelper; import io.airbyte.workers.temporal.sync.SyncOutputProvider; +import java.lang.invoke.MethodHandles; import java.util.List; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * SyncCheckConnectionFailure. 
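The call-site change above, from configFetchActivity.getMaxAttempt().getMaxAttempt() to .maxAttempt(), follows from GetMaxAttemptOutput being converted to a Java record later in this diff: record components expose accessors named after the component rather than JavaBean-style getters. A small self-contained sketch (the enclosing class and values are illustrative only):

    public class RecordAccessorExample {

      // Replacement for the Lombok @Data holder: the record exposes maxAttempt(), not getMaxAttempt().
      record GetMaxAttemptOutput(int maxAttempt) {}

      public static void main(String[] args) {
        // Previously: new GetMaxAttemptOutput(3).getMaxAttempt()
        int maxAttempt = new GetMaxAttemptOutput(3).maxAttempt();
        System.out.println(maxAttempt); // prints 3
      }

    }
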
*/ -@Slf4j public class SyncCheckConnectionResult { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final Long jobId; private final Integer attemptId; private ConnectorJobOutput failureOutput; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AppendToAttemptLogActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AppendToAttemptLogActivity.java index 618f5d39b56..a8370dfbc12 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AppendToAttemptLogActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AppendToAttemptLogActivity.java @@ -6,9 +6,7 @@ import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; +import java.util.Objects; /** * Activity for adding messages to a given attempt log from workflows. @@ -28,9 +26,6 @@ enum LogLevel { /** * Input for append to attempt log activity method. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class LogInput { private Long jobId; @@ -38,18 +33,109 @@ class LogInput { private String message; private LogLevel level; + public LogInput() {} + + public LogInput(Long jobId, Integer attemptNumber, String message, LogLevel level) { + this.jobId = jobId; + this.attemptNumber = attemptNumber; + this.message = message; + this.level = level; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getAttemptNumber() { + return attemptNumber; + } + + public void setAttemptNumber(Integer attemptNumber) { + this.attemptNumber = attemptNumber; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public LogLevel getLevel() { + return level; + } + + public void setLevel(LogLevel level) { + this.level = level; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + LogInput logInput = (LogInput) o; + return Objects.equals(jobId, logInput.jobId) && Objects.equals(attemptNumber, logInput.attemptNumber) + && Objects.equals(message, logInput.message) && level == logInput.level; + } + + @Override + public int hashCode() { + return Objects.hash(jobId, attemptNumber, message, level); + } + + @Override + public String toString() { + return "LogInput{jobId=" + jobId + ", attemptNumber=" + attemptNumber + ", message='" + message + '\'' + ", level=" + level + '}'; + } + } /** * Output for append to attempt log activity method. 
*/ - @AllArgsConstructor - @NoArgsConstructor - @Data class LogOutput { private Boolean success; + public LogOutput() {} + + public LogOutput(Boolean success) { + this.success = success; + } + + public Boolean getSuccess() { + return success; + } + + public void setSuccess(Boolean success) { + this.success = success; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + LogOutput logOutput = (LogOutput) o; + return Objects.equals(success, logOutput.success); + } + + @Override + public int hashCode() { + return Objects.hashCode(success); + } + + @Override + public String toString() { + return "LogOutput{success=" + success + '}'; + } + } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivity.java index e256cc888e7..0e4a390ad70 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/AutoDisableConnectionActivity.java @@ -7,10 +7,8 @@ import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; import java.time.Instant; +import java.util.Objects; import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; /** * AutoDisableConnectionActivity. @@ -21,9 +19,6 @@ public interface AutoDisableConnectionActivity { /** * AutoDisableConnectionActivityInput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class AutoDisableConnectionActivityInput { private UUID connectionId; @@ -31,18 +26,85 @@ class AutoDisableConnectionActivityInput { @Deprecated(forRemoval = true) private Instant currTimestamp; + public AutoDisableConnectionActivityInput() {} + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + public Instant getCurrTimestamp() { + return currTimestamp; + } + + public void setCurrTimestamp(Instant currTimestamp) { + this.currTimestamp = currTimestamp; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + AutoDisableConnectionActivityInput that = (AutoDisableConnectionActivityInput) o; + return Objects.equals(connectionId, that.connectionId) && Objects.equals(currTimestamp, that.currTimestamp); + } + + @Override + public int hashCode() { + return Objects.hash(connectionId, currTimestamp); + } + + @Override + public String toString() { + return "AutoDisableConnectionActivityInput{connectionId=" + connectionId + ", currTimestamp=" + currTimestamp + '}'; + } + } /** * AutoDisableConnectionOutput. 
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class AutoDisableConnectionOutput { private boolean disabled; + public AutoDisableConnectionOutput(boolean disabled) { + this.disabled = disabled; + } + + public AutoDisableConnectionOutput() {} + + public boolean isDisabled() { + return disabled; + } + + public void setDisabled(boolean disabled) { + this.disabled = disabled; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + AutoDisableConnectionOutput that = (AutoDisableConnectionOutput) o; + return disabled == that.disabled; + } + + @Override + public int hashCode() { + return Objects.hashCode(disabled); + } + + @Override + public String toString() { + return "AutoDisableConnectionOutput{disabled=" + disabled + '}'; + } + } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/CheckRunProgressActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/CheckRunProgressActivity.java index 558d04226ca..8df1a39dcd9 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/CheckRunProgressActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/CheckRunProgressActivity.java @@ -6,11 +6,8 @@ import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; +import java.util.Objects; import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; -import lombok.experimental.Accessors; /** * Activity to check whether a given run (attempt for now) made progress. Output to be used as input @@ -22,28 +19,102 @@ public interface CheckRunProgressActivity { /** * Input object for CheckRunProgressActivity#checkProgress. */ - @AllArgsConstructor - @NoArgsConstructor - @Data class Input { private Long jobId; private Integer attemptNo; private UUID connectionId; + public Input(Long jobId, Integer attemptNo, UUID connectionId) { + this.jobId = jobId; + this.attemptNo = attemptNo; + this.connectionId = connectionId; + } + + public Input() {} + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getAttemptNo() { + return attemptNo; + } + + public void setAttemptNo(Integer attemptNo) { + this.attemptNo = attemptNo; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + Input input = (Input) o; + return Objects.equals(jobId, input.jobId) && Objects.equals(attemptNo, input.attemptNo) && Objects.equals( + connectionId, input.connectionId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, attemptNo, connectionId); + } + + @Override + public String toString() { + return "Input{jobId=" + jobId + ", attemptNo=" + attemptNo + ", connectionId=" + connectionId + '}'; + } + } /** * Output object for CheckRunProgressActivity#checkProgress. 
*/ - @AllArgsConstructor - @NoArgsConstructor - @Data class Output { - @Accessors(fluent = true) private Boolean madeProgress; + public Output(Boolean madeProgress) { + this.madeProgress = madeProgress; + } + + public Output() {} + + public Boolean madeProgress() { + return madeProgress; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + Output output = (Output) o; + return Objects.equals(madeProgress, output.madeProgress); + } + + @Override + public int hashCode() { + return Objects.hashCode(madeProgress); + } + + @Override + public String toString() { + return "Output{madeProgress=" + madeProgress + '}'; + } + } @ActivityMethod diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/CheckRunProgressActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/CheckRunProgressActivityImpl.java index 8f160f8b1c1..cc065e4b451 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/CheckRunProgressActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/CheckRunProgressActivityImpl.java @@ -8,12 +8,10 @@ import io.airbyte.workers.helpers.ProgressChecker; import jakarta.inject.Singleton; import java.io.IOException; -import lombok.extern.slf4j.Slf4j; /** * Concrete CheckRunProgressActivity. */ -@Slf4j @Singleton public class CheckRunProgressActivityImpl implements CheckRunProgressActivity { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivity.java index dbac24e99cd..790a3442912 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivity.java @@ -9,11 +9,9 @@ import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; import java.time.Duration; +import java.util.Objects; import java.util.Optional; import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; /** * ConfigFetchActivity. @@ -33,25 +31,85 @@ public interface ConfigFetchActivity { /** * ScheduleRetrieverInput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class ScheduleRetrieverInput { private UUID connectionId; + public ScheduleRetrieverInput() {} + + public ScheduleRetrieverInput(UUID connectionId) { + this.connectionId = connectionId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + ScheduleRetrieverInput that = (ScheduleRetrieverInput) o; + return Objects.equals(connectionId, that.connectionId); + } + + @Override + public int hashCode() { + return Objects.hashCode(connectionId); + } + + @Override + public String toString() { + return "ScheduleRetrieverInput{connectionId=" + connectionId + '}'; + } + } /** * ScheduleRetrieverOutput. 
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class ScheduleRetrieverOutput { private Duration timeToWait; + public ScheduleRetrieverOutput() {} + + public ScheduleRetrieverOutput(Duration timeToWait) { + this.timeToWait = timeToWait; + } + + public Duration getTimeToWait() { + return timeToWait; + } + + public void setTimeToWait(Duration timeToWait) { + this.timeToWait = timeToWait; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + ScheduleRetrieverOutput that = (ScheduleRetrieverOutput) o; + return Objects.equals(timeToWait, that.timeToWait); + } + + @Override + public int hashCode() { + return Objects.hashCode(timeToWait); + } + + @Override + public String toString() { + return "ScheduleRetrieverOutput{timeToWait=" + timeToWait + '}'; + } + } /** @@ -65,14 +123,7 @@ class ScheduleRetrieverOutput { /** * GetMaxAttemptOutput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor - class GetMaxAttemptOutput { - - private int maxAttempt; - - } + record GetMaxAttemptOutput(int maxAttempt) {} /** * Return the maximum number of attempt allowed for a connection. diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivityImpl.java index 3e6a074b9b7..9be48307fff 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivityImpl.java @@ -38,6 +38,7 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.text.ParseException; import java.time.DateTimeException; import java.time.Duration; @@ -49,7 +50,6 @@ import java.util.TimeZone; import java.util.UUID; import java.util.function.Supplier; -import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTimeZone; import org.openapitools.client.infrastructure.ClientException; import org.quartz.CronExpression; @@ -59,11 +59,11 @@ /** * ConfigFetchActivityImpl. */ -@Slf4j @Singleton public class ConfigFetchActivityImpl implements ConfigFetchActivity { - private static final Logger LOGGER = LoggerFactory.getLogger(ConfigFetchActivityImpl.class); + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final long MS_PER_SECOND = 1000L; private static final long MIN_CRON_INTERVAL_SECONDS = 60; private static final Set SCHEDULING_NOISE_WORKSPACE_IDS = Set.of( @@ -205,7 +205,7 @@ private Duration addSchedulingNoiseForAllowListedWorkspace(final Duration timeTo // We really do want to add some scheduling noise for this connection. final long minutesToWait = (long) (Math.random() * SCHEDULING_NOISE_CONSTANT); - LOGGER.debug("Adding {} minutes noise to wait", minutesToWait); + log.debug("Adding {} minutes noise to wait", minutesToWait); // Note: we add an extra second to make the unit tests pass in case `minutesToWait` was 0. 
return timeToWait.plusMinutes(minutesToWait).plusSeconds(1); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivity.java index a5947ee9a41..c09a446ce13 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivity.java @@ -7,10 +7,8 @@ import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; import java.util.Map; +import java.util.Objects; import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; /** * Activity that fetches feature flags. @@ -21,25 +19,85 @@ public interface FeatureFlagFetchActivity { /** * Feature flag fetch input. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class FeatureFlagFetchInput { private UUID connectionId; + public FeatureFlagFetchInput() {} + + public FeatureFlagFetchInput(UUID connectionId) { + this.connectionId = connectionId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + FeatureFlagFetchInput that = (FeatureFlagFetchInput) o; + return Objects.equals(connectionId, that.connectionId); + } + + @Override + public int hashCode() { + return Objects.hashCode(connectionId); + } + + @Override + public String toString() { + return "FeatureFlagFetchInput{connectionId=" + connectionId + '}'; + } + } /** * Feature flag fetch output. 
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class FeatureFlagFetchOutput { private Map featureFlags; + public FeatureFlagFetchOutput() {} + + public FeatureFlagFetchOutput(Map featureFlags) { + this.featureFlags = featureFlags; + } + + public Map getFeatureFlags() { + return featureFlags; + } + + public void setFeatureFlags(Map featureFlags) { + this.featureFlags = featureFlags; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + FeatureFlagFetchOutput that = (FeatureFlagFetchOutput) o; + return Objects.equals(featureFlags, that.featureFlags); + } + + @Override + public int hashCode() { + return Objects.hashCode(featureFlags); + } + + @Override + public String toString() { + return "FeatureFlagFetchOutput{featureFlags=" + featureFlags + '}'; + } + } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivityImpl.java index 4eb5c7a755a..25bf93a66e5 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivityImpl.java @@ -14,18 +14,21 @@ import io.micronaut.http.HttpStatus; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.Map; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; import org.openapitools.client.infrastructure.ClientException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Fetches feature flags to be used in temporal workflows. */ -@Slf4j @Singleton public class FeatureFlagFetchActivityImpl implements FeatureFlagFetchActivity { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AirbyteApiClient airbyteApiClient; private final FeatureFlagClient featureFlagClient; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java index 52c74bb8208..0c86e774005 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/GenerateInputActivity.java @@ -8,9 +8,7 @@ import io.airbyte.workers.models.SyncJobCheckConnectionInputs; import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; +import java.util.Objects; /** * GenerateInputActivity. @@ -21,27 +19,105 @@ public interface GenerateInputActivity { /** * SyncInput. 
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class SyncInput { private int attemptId; private long jobId; + public SyncInput() {} + + public SyncInput(int attemptId, long jobId) { + this.attemptId = attemptId; + this.jobId = jobId; + } + + public int getAttemptId() { + return attemptId; + } + + public void setAttemptId(int attemptId) { + this.attemptId = attemptId; + } + + public long getJobId() { + return jobId; + } + + public void setJobId(long jobId) { + this.jobId = jobId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + SyncInput syncInput = (SyncInput) o; + return attemptId == syncInput.attemptId && jobId == syncInput.jobId; + } + + @Override + public int hashCode() { + return Objects.hash(attemptId, jobId); + } + + @Override + public String toString() { + return "SyncInput{attemptId=" + attemptId + ", jobId=" + jobId + '}'; + } + } /** * SyncInputWithAttemptNumber. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class SyncInputWithAttemptNumber { private int attemptNumber; private long jobId; + public SyncInputWithAttemptNumber() {} + + public SyncInputWithAttemptNumber(int attemptNumber, long jobId) { + this.attemptNumber = attemptNumber; + this.jobId = jobId; + } + + public int getAttemptNumber() { + return attemptNumber; + } + + public void setAttemptNumber(int attemptNumber) { + this.attemptNumber = attemptNumber; + } + + public long getJobId() { + return jobId; + } + + public void setJobId(long jobId) { + this.jobId = jobId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + SyncInputWithAttemptNumber that = (SyncInputWithAttemptNumber) o; + return attemptNumber == that.attemptNumber && jobId == that.jobId; + } + + @Override + public int hashCode() { + return Objects.hash(attemptNumber, jobId); + } + + @Override + public String toString() { + return "SyncInputWithAttemptNumber{attemptNumber=" + attemptNumber + ", jobId=" + jobId + '}'; + } + } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java index d2b9fa6ec60..4cf21874ed1 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivity.java @@ -9,39 +9,98 @@ import io.airbyte.config.StandardSyncOutput; import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; +import java.util.Objects; import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; /** * JobCreationAndStatusUpdateActivity. */ @ActivityInterface +@SuppressWarnings("PMD.AvoidDuplicateLiterals") public interface JobCreationAndStatusUpdateActivity { /** * JobCreationInput. 
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class JobCreationInput { private UUID connectionId; + public JobCreationInput() {} + + public JobCreationInput(UUID connectionId) { + this.connectionId = connectionId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + JobCreationInput that = (JobCreationInput) o; + return Objects.equals(connectionId, that.connectionId); + } + + @Override + public int hashCode() { + return Objects.hashCode(connectionId); + } + + @Override + public String toString() { + return "JobCreationInput{connectionId=" + connectionId + '}'; + } + } /** * JobCreationOutput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class JobCreationOutput { private Long jobId; + public JobCreationOutput() {} + + public JobCreationOutput(Long jobId) { + this.jobId = jobId; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + JobCreationOutput that = (JobCreationOutput) o; + return Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hashCode(jobId); + } + + @Override + public String toString() { + return "JobCreationOutput{jobId=" + jobId + '}'; + } + } /** @@ -56,25 +115,85 @@ class JobCreationOutput { /** * AttemptCreationInput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class AttemptCreationInput { private Long jobId; + public AttemptCreationInput() {} + + public AttemptCreationInput(Long jobId) { + this.jobId = jobId; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + AttemptCreationInput that = (AttemptCreationInput) o; + return Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hashCode(jobId); + } + + @Override + public String toString() { + return "AttemptCreationInput{jobId=" + jobId + '}'; + } + } /** * AttemptNumberCreationOutput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class AttemptNumberCreationOutput { private Integer attemptNumber; + public AttemptNumberCreationOutput() {} + + public AttemptNumberCreationOutput(Integer attemptNumber) { + this.attemptNumber = attemptNumber; + } + + public Integer getAttemptNumber() { + return attemptNumber; + } + + public void setAttemptNumber(Integer attemptNumber) { + this.attemptNumber = attemptNumber; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + AttemptNumberCreationOutput that = (AttemptNumberCreationOutput) o; + return Objects.equals(attemptNumber, that.attemptNumber); + } + + @Override + public int hashCode() { + return Objects.hashCode(attemptNumber); + } + + @Override + public String toString() { + return "AttemptNumberCreationOutput{attemptNumber=" + attemptNumber + '}'; + } + } /** @@ -89,9 +208,6 @@ class AttemptNumberCreationOutput { /** * JobSuccessInputWithAttemptNumber. 
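The DTO changes in this file all follow the same mechanical expansion: the Lombok @Data/@NoArgsConstructor/@AllArgsConstructor triple is replaced by hand-written constructors, accessors, and Objects-based equals/hashCode/toString, which keeps the Jackson-visible property shape of these Temporal activity payloads unchanged (presumably the reason they were not turned into records). A condensed sketch of the pattern with a single hypothetical field:

    import java.util.Objects;
    import java.util.UUID;

    // Hypothetical single-field input, expanded the same way as JobCreationInput and friends.
    class ExampleInput {

      private UUID connectionId;

      public ExampleInput() {}                         // was @NoArgsConstructor

      public ExampleInput(UUID connectionId) {         // was @AllArgsConstructor
        this.connectionId = connectionId;
      }

      public UUID getConnectionId() {                  // was @Data (getter)
        return connectionId;
      }

      public void setConnectionId(UUID connectionId) { // was @Data (setter)
        this.connectionId = connectionId;
      }

      @Override
      public boolean equals(Object o) {                // was @Data (equals)
        if (o == null || getClass() != o.getClass()) {
          return false;
        }
        return Objects.equals(connectionId, ((ExampleInput) o).connectionId);
      }

      @Override
      public int hashCode() {                          // was @Data (hashCode)
        return Objects.hashCode(connectionId);
      }

      @Override
      public String toString() {                       // was @Data (toString)
        return "ExampleInput{connectionId=" + connectionId + '}';
      }

    }
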
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class JobSuccessInputWithAttemptNumber { private Long jobId; @@ -99,6 +215,72 @@ class JobSuccessInputWithAttemptNumber { private UUID connectionId; private StandardSyncOutput standardSyncOutput; + public JobSuccessInputWithAttemptNumber() {} + + public JobSuccessInputWithAttemptNumber(Long jobId, Integer attemptNumber, UUID connectionId, StandardSyncOutput standardSyncOutput) { + this.jobId = jobId; + this.attemptNumber = attemptNumber; + this.connectionId = connectionId; + this.standardSyncOutput = standardSyncOutput; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getAttemptNumber() { + return attemptNumber; + } + + public void setAttemptNumber(Integer attemptNumber) { + this.attemptNumber = attemptNumber; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + public StandardSyncOutput getStandardSyncOutput() { + return standardSyncOutput; + } + + public void setStandardSyncOutput(StandardSyncOutput standardSyncOutput) { + this.standardSyncOutput = standardSyncOutput; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + JobSuccessInputWithAttemptNumber that = (JobSuccessInputWithAttemptNumber) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(attemptNumber, that.attemptNumber) && Objects.equals( + connectionId, that.connectionId) && Objects.equals(standardSyncOutput, that.standardSyncOutput); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, attemptNumber, connectionId, standardSyncOutput); + } + + @Override + public String toString() { + return "JobSuccessInputWithAttemptNumber{" + + "jobId=" + jobId + + ", attemptNumber=" + attemptNumber + + ", connectionId=" + connectionId + + ", standardSyncOutput=" + standardSyncOutput + + '}'; + } + } /** @@ -110,9 +292,6 @@ class JobSuccessInputWithAttemptNumber { /** * JobFailureInput. 
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class JobFailureInput { private Long jobId; @@ -120,6 +299,72 @@ class JobFailureInput { private UUID connectionId; private String reason; + public JobFailureInput() {} + + public JobFailureInput(Long jobId, Integer attemptNumber, UUID connectionId, String reason) { + this.jobId = jobId; + this.attemptNumber = attemptNumber; + this.connectionId = connectionId; + this.reason = reason; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getAttemptNumber() { + return attemptNumber; + } + + public void setAttemptNumber(Integer attemptNumber) { + this.attemptNumber = attemptNumber; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + public String getReason() { + return reason; + } + + public void setReason(String reason) { + this.reason = reason; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + JobFailureInput that = (JobFailureInput) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(attemptNumber, that.attemptNumber) && Objects.equals( + connectionId, that.connectionId) && Objects.equals(reason, that.reason); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, attemptNumber, connectionId, reason); + } + + @Override + public String toString() { + return "JobFailureInput{" + + "jobId=" + jobId + + ", attemptNumber=" + attemptNumber + + ", connectionId=" + connectionId + + ", reason='" + reason + '\'' + + '}'; + } + } /** @@ -131,9 +376,6 @@ class JobFailureInput { /** * AttemptNumberFailureInput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class AttemptNumberFailureInput { private Long jobId; @@ -142,6 +384,88 @@ class AttemptNumberFailureInput { private StandardSyncOutput standardSyncOutput; private AttemptFailureSummary attemptFailureSummary; + public AttemptNumberFailureInput() {} + + public AttemptNumberFailureInput(Long jobId, + Integer attemptNumber, + UUID connectionId, + StandardSyncOutput standardSyncOutput, + AttemptFailureSummary attemptFailureSummary) { + this.jobId = jobId; + this.attemptNumber = attemptNumber; + this.connectionId = connectionId; + this.standardSyncOutput = standardSyncOutput; + this.attemptFailureSummary = attemptFailureSummary; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getAttemptNumber() { + return attemptNumber; + } + + public void setAttemptNumber(Integer attemptNumber) { + this.attemptNumber = attemptNumber; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + public StandardSyncOutput getStandardSyncOutput() { + return standardSyncOutput; + } + + public void setStandardSyncOutput(StandardSyncOutput standardSyncOutput) { + this.standardSyncOutput = standardSyncOutput; + } + + public AttemptFailureSummary getAttemptFailureSummary() { + return attemptFailureSummary; + } + + public void setAttemptFailureSummary(AttemptFailureSummary attemptFailureSummary) { + this.attemptFailureSummary = attemptFailureSummary; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + AttemptNumberFailureInput that = (AttemptNumberFailureInput) o; + 
return Objects.equals(jobId, that.jobId) && Objects.equals(attemptNumber, that.attemptNumber) && Objects.equals( + connectionId, that.connectionId) && Objects.equals(standardSyncOutput, that.standardSyncOutput) + && Objects.equals( + attemptFailureSummary, that.attemptFailureSummary); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, attemptNumber, connectionId, standardSyncOutput, attemptFailureSummary); + } + + @Override + public String toString() { + return "AttemptNumberFailureInput{" + + "jobId=" + jobId + + ", attemptNumber=" + attemptNumber + + ", connectionId=" + connectionId + + ", standardSyncOutput=" + standardSyncOutput + + ", attemptFailureSummary=" + attemptFailureSummary + + '}'; + } + } /** @@ -153,9 +477,6 @@ class AttemptNumberFailureInput { /** * JobCancelledInputWithAttemptNumber. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class JobCancelledInputWithAttemptNumber { private Long jobId; @@ -163,6 +484,72 @@ class JobCancelledInputWithAttemptNumber { private UUID connectionId; private AttemptFailureSummary attemptFailureSummary; + public JobCancelledInputWithAttemptNumber() {} + + public JobCancelledInputWithAttemptNumber(Long jobId, Integer attemptNumber, UUID connectionId, AttemptFailureSummary attemptFailureSummary) { + this.jobId = jobId; + this.attemptNumber = attemptNumber; + this.connectionId = connectionId; + this.attemptFailureSummary = attemptFailureSummary; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getAttemptNumber() { + return attemptNumber; + } + + public void setAttemptNumber(Integer attemptNumber) { + this.attemptNumber = attemptNumber; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + public AttemptFailureSummary getAttemptFailureSummary() { + return attemptFailureSummary; + } + + public void setAttemptFailureSummary(AttemptFailureSummary attemptFailureSummary) { + this.attemptFailureSummary = attemptFailureSummary; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + JobCancelledInputWithAttemptNumber that = (JobCancelledInputWithAttemptNumber) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(attemptNumber, that.attemptNumber) && Objects.equals( + connectionId, that.connectionId) && Objects.equals(attemptFailureSummary, that.attemptFailureSummary); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, attemptNumber, connectionId, attemptFailureSummary); + } + + @Override + public String toString() { + return "JobCancelledInputWithAttemptNumber{" + + "jobId=" + jobId + + ", attemptNumber=" + attemptNumber + + ", connectionId=" + connectionId + + ", attemptFailureSummary=" + attemptFailureSummary + + '}'; + } + } /** @@ -174,14 +561,53 @@ class JobCancelledInputWithAttemptNumber { /** * ReportJobStartInput. 
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class ReportJobStartInput { private Long jobId; private UUID connectionId; + public ReportJobStartInput() {} + + public ReportJobStartInput(Long jobId, UUID connectionId) { + this.jobId = jobId; + this.connectionId = connectionId; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + ReportJobStartInput that = (ReportJobStartInput) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(connectionId, that.connectionId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, connectionId); + } + + @Override + public String toString() { + return "ReportJobStartInput{jobId=" + jobId + ", connectionId=" + connectionId + '}'; + } + } @ActivityMethod @@ -190,13 +616,43 @@ class ReportJobStartInput { /** * EnsureCleanJobStateInput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class EnsureCleanJobStateInput { private UUID connectionId; + public EnsureCleanJobStateInput() {} + + public EnsureCleanJobStateInput(UUID connectionId) { + this.connectionId = connectionId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + EnsureCleanJobStateInput that = (EnsureCleanJobStateInput) o; + return Objects.equals(connectionId, that.connectionId); + } + + @Override + public int hashCode() { + return Objects.hashCode(connectionId); + } + + @Override + public String toString() { + return "EnsureCleanJobStateInput{connectionId=" + connectionId + '}'; + } + } @ActivityMethod @@ -205,15 +661,64 @@ class EnsureCleanJobStateInput { /** * JobCheckFailureInput. 
*/ - @Data - @NoArgsConstructor - @AllArgsConstructor class JobCheckFailureInput { private Long jobId; private Integer attemptId; private UUID connectionId; + public JobCheckFailureInput() {} + + public JobCheckFailureInput(Long jobId, Integer attemptId, UUID connectionId) { + this.jobId = jobId; + this.attemptId = attemptId; + this.connectionId = connectionId; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Integer getAttemptId() { + return attemptId; + } + + public void setAttemptId(Integer attemptId) { + this.attemptId = attemptId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + JobCheckFailureInput that = (JobCheckFailureInput) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(attemptId, that.attemptId) && Objects.equals(connectionId, + that.connectionId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, attemptId, connectionId); + } + + @Override + public String toString() { + return "JobCheckFailureInput{jobId=" + jobId + ", attemptId=" + attemptId + ", connectionId=" + connectionId + '}'; + } + } @ActivityMethod diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java index 3b2abdc4617..1659df83e50 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/JobCreationAndStatusUpdateActivityImpl.java @@ -33,18 +33,21 @@ import jakarta.inject.Named; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; import org.openapitools.client.infrastructure.ClientException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * JobCreationAndStatusUpdateActivityImpl. 
*/ -@Slf4j @Singleton @Requires(env = EnvConstants.CONTROL_PLANE) public class JobCreationAndStatusUpdateActivityImpl implements JobCreationAndStatusUpdateActivity { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AirbyteApiClient airbyteApiClient; private final FeatureFlagClient featureFlagClient; private final OutputStorageClient stateClient; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivity.java index 96a96b6d54f..2e91cdbafab 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivity.java @@ -9,10 +9,9 @@ import io.airbyte.metrics.lib.OssMetricsRegistry; import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; +import java.util.Arrays; +import java.util.Objects; import java.util.Optional; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; /** * Custom Temporal activity that records metrics. @@ -34,9 +33,11 @@ enum FailureCause { /** * RecordMetricInput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor + @SuppressWarnings({ + "PMD.ArrayIsStoredDirectly", + "PMD.MethodReturnsInternalArray", + "PMD.UseVarargs" + }) class RecordMetricInput { private ConnectionUpdaterInput connectionUpdaterInput; @@ -44,6 +45,75 @@ class RecordMetricInput { private OssMetricsRegistry metricName; private MetricAttribute[] metricAttributes; + public RecordMetricInput() {} + + public RecordMetricInput(ConnectionUpdaterInput connectionUpdaterInput, + Optional failureCause, + OssMetricsRegistry metricName, + MetricAttribute[] metricAttributes) { + this.connectionUpdaterInput = connectionUpdaterInput; + this.failureCause = failureCause; + this.metricName = metricName; + this.metricAttributes = metricAttributes; + } + + public ConnectionUpdaterInput getConnectionUpdaterInput() { + return connectionUpdaterInput; + } + + public void setConnectionUpdaterInput(ConnectionUpdaterInput connectionUpdaterInput) { + this.connectionUpdaterInput = connectionUpdaterInput; + } + + public Optional getFailureCause() { + return failureCause; + } + + public void setFailureCause(Optional failureCause) { + this.failureCause = failureCause; + } + + public OssMetricsRegistry getMetricName() { + return metricName; + } + + public void setMetricName(OssMetricsRegistry metricName) { + this.metricName = metricName; + } + + public MetricAttribute[] getMetricAttributes() { + return metricAttributes; + } + + public void setMetricAttributes(MetricAttribute[] metricAttributes) { + this.metricAttributes = metricAttributes; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + RecordMetricInput that = (RecordMetricInput) o; + return Objects.equals(connectionUpdaterInput, that.connectionUpdaterInput) && Objects.equals(failureCause, that.failureCause) + && metricName == that.metricName && Objects.deepEquals(metricAttributes, that.metricAttributes); + } + + @Override + public int hashCode() { + return Objects.hash(connectionUpdaterInput, failureCause, metricName, Arrays.hashCode(metricAttributes)); + } + + @Override + public String toString() { + return "RecordMetricInput{" + + "connectionUpdaterInput=" + 
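The @Slf4j removals in these classes are compensated by an explicit SLF4J logger obtained via MethodHandles.lookup().lookupClass(), which resolves the enclosing class without hard-coding its name. A minimal sketch of the idiom in isolation (the class and method names are illustrative):

import java.lang.invoke.MethodHandles;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class LoggerIdiomSketch {

  // Equivalent to the field Lombok's @Slf4j generated: a static logger named after the
  // surrounding class. MethodHandles.lookup().lookupClass() survives copy/paste and class
  // renames better than a string literal or an explicit class reference.
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  void doWork() {
    log.info("doing work");
  }
}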
connectionUpdaterInput + + ", failureCause=" + failureCause + + ", metricName=" + metricName + + ", metricAttributes=" + Arrays.toString(metricAttributes) + + '}'; + } + } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivityImpl.java index b5d1f1449f7..f2524d4fdf5 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivityImpl.java @@ -25,24 +25,27 @@ import io.micronaut.http.HttpStatus; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; import org.openapitools.client.infrastructure.ClientException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Implementation of the {@link RecordMetricActivity} that is managed by the application framework * and therefore has access to other singletons managed by the framework. */ -@Slf4j @Singleton @Requires(env = EnvConstants.CONTROL_PLANE) public class RecordMetricActivityImpl implements RecordMetricActivity { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AirbyteApiClient airbyteApiClient; private final MetricClient metricClient; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RetryStatePersistenceActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RetryStatePersistenceActivity.java index d1a39a1db9d..a346fd0b00d 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RetryStatePersistenceActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/RetryStatePersistenceActivity.java @@ -7,10 +7,8 @@ import io.airbyte.commons.temporal.scheduling.retries.RetryManager; import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; +import java.util.Objects; import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; /** * Sends and retrieves retry state data from persistence. @@ -21,52 +19,200 @@ public interface RetryStatePersistenceActivity { /** * Input for hydrate activity method. 
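RecordMetricInput stores its MetricAttribute[] directly, hence the PMD suppressions (ArrayIsStoredDirectly, MethodReturnsInternalArray) and the Arrays/Objects.deepEquals handling above. Where exposing the internal array is not acceptable, the usual alternative is to copy on the way in and out; a generic sketch of that, using a hypothetical String[] field rather than the real class:

import java.util.Arrays;

class ArrayFieldSketch {

  private final String[] attributes;

  public ArrayFieldSketch(String[] attributes) {
    // Defensive copy so later mutation of the caller's array cannot change this object.
    this.attributes = attributes == null ? null : attributes.clone();
  }

  public String[] getAttributes() {
    // Copy on the way out as well, so callers cannot mutate internal state.
    return attributes == null ? null : attributes.clone();
  }

  @Override
  public boolean equals(Object o) {
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    return Arrays.equals(attributes, ((ArrayFieldSketch) o).attributes);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(attributes);
  }

  @Override
  public String toString() {
    return "ArrayFieldSketch{attributes=" + Arrays.toString(attributes) + '}';
  }
}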
*/ - @AllArgsConstructor - @NoArgsConstructor - @Data class HydrateInput { private Long jobId; private UUID connectionId; + public HydrateInput() {} + + public HydrateInput(Long jobId, UUID connectionId) { + this.jobId = jobId; + this.connectionId = connectionId; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + HydrateInput that = (HydrateInput) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(connectionId, that.connectionId); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, connectionId); + } + + @Override + public String toString() { + return "HydrateInput{jobId=" + jobId + ", connectionId=" + connectionId + '}'; + } + } /** * Output for hydrate activity method. */ - @AllArgsConstructor - @NoArgsConstructor - @Data class HydrateOutput { private RetryManager manager; + public HydrateOutput() {} + + public HydrateOutput(RetryManager manager) { + this.manager = manager; + } + + public RetryManager getManager() { + return manager; + } + + public void setManager(RetryManager manager) { + this.manager = manager; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + HydrateOutput that = (HydrateOutput) o; + return Objects.equals(manager, that.manager); + } + + @Override + public int hashCode() { + return Objects.hashCode(manager); + } + + @Override + public String toString() { + return "HydrateOutput{manager=" + manager + '}'; + } + } /** * Input for persist activity method. */ - @AllArgsConstructor - @NoArgsConstructor - @Data class PersistInput { private Long jobId; private UUID connectionId; private RetryManager manager; + public PersistInput() {} + + public PersistInput(Long jobId, UUID connectionId, RetryManager manager) { + this.jobId = jobId; + this.connectionId = connectionId; + this.manager = manager; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + public RetryManager getManager() { + return manager; + } + + public void setManager(RetryManager manager) { + this.manager = manager; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + PersistInput that = (PersistInput) o; + return Objects.equals(jobId, that.jobId) && Objects.equals(connectionId, that.connectionId) && Objects.equals(manager, + that.manager); + } + + @Override + public int hashCode() { + return Objects.hash(jobId, connectionId, manager); + } + + @Override + public String toString() { + return "PersistInput{jobId=" + jobId + ", connectionId=" + connectionId + ", manager=" + manager + '}'; + } + } /** * Output for persist activity method. 
*/ - @AllArgsConstructor - @NoArgsConstructor - @Data class PersistOutput { private Boolean success; + public PersistOutput() {} + + public PersistOutput(Boolean success) { + this.success = success; + } + + public Boolean getSuccess() { + return success; + } + + public void setSuccess(Boolean success) { + this.success = success; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + PersistOutput that = (PersistOutput) o; + return Objects.equals(success, that.success); + } + + @Override + public int hashCode() { + return Objects.hashCode(success); + } + + @Override + public String toString() { + return "PersistOutput{success=" + success + '}'; + } + } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/StreamResetActivity.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/StreamResetActivity.java index 5a57297f721..ce5b850e900 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/StreamResetActivity.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/StreamResetActivity.java @@ -6,10 +6,8 @@ import io.temporal.activity.ActivityInterface; import io.temporal.activity.ActivityMethod; +import java.util.Objects; import java.util.UUID; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.NoArgsConstructor; /** * StreamResetActivity. @@ -20,14 +18,53 @@ public interface StreamResetActivity { /** * DeleteStreamResetRecordsForJobInput. */ - @Data - @NoArgsConstructor - @AllArgsConstructor class DeleteStreamResetRecordsForJobInput { private UUID connectionId; private Long jobId; + public DeleteStreamResetRecordsForJobInput() {} + + public DeleteStreamResetRecordsForJobInput(UUID connectionId, Long jobId) { + this.connectionId = connectionId; + this.jobId = jobId; + } + + public UUID getConnectionId() { + return connectionId; + } + + public void setConnectionId(UUID connectionId) { + this.connectionId = connectionId; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + @Override + public boolean equals(Object o) { + if (o == null || getClass() != o.getClass()) { + return false; + } + DeleteStreamResetRecordsForJobInput that = (DeleteStreamResetRecordsForJobInput) o; + return Objects.equals(connectionId, that.connectionId) && Objects.equals(jobId, that.jobId); + } + + @Override + public int hashCode() { + return Objects.hash(connectionId, jobId); + } + + @Override + public String toString() { + return "DeleteStreamResetRecordsForJobInput{connectionId=" + connectionId + ", jobId=" + jobId + '}'; + } + } /** diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/StreamResetActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/StreamResetActivityImpl.java index 5ef64b54e02..494442c1b86 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/StreamResetActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/StreamResetActivityImpl.java @@ -19,13 +19,11 @@ import jakarta.inject.Singleton; import java.io.IOException; import java.util.Map; -import lombok.extern.slf4j.Slf4j; import org.openapitools.client.infrastructure.ClientException; /** * StreamResetActivityImpl. 
*/ -@Slf4j @Singleton @Requires(env = EnvConstants.CONTROL_PLANE) public class StreamResetActivityImpl implements StreamResetActivity { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/WorkflowConfigActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/WorkflowConfigActivityImpl.java index 05f6efed621..b91e6648a8f 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/WorkflowConfigActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/scheduling/activities/WorkflowConfigActivityImpl.java @@ -12,13 +12,11 @@ import io.micronaut.context.annotation.Requires; import jakarta.inject.Singleton; import java.time.Duration; -import lombok.extern.slf4j.Slf4j; /** * Implementation of the {@link WorkflowConfigActivity} that is managed by the application framework * and therefore has access to the configuration loaded by the framework. */ -@Slf4j @Singleton @Requires(env = EnvConstants.CONTROL_PLANE) public class WorkflowConfigActivityImpl implements WorkflowConfigActivity { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java index 61c78782a0c..a2c9d103591 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/spec/SpecActivityImpl.java @@ -23,12 +23,10 @@ import io.airbyte.workers.sync.WorkloadClient; import io.micronaut.context.annotation.Requires; import jakarta.inject.Singleton; -import lombok.extern.slf4j.Slf4j; /** * SpecActivityImpl. */ -@Slf4j @Singleton @Requires(env = EnvConstants.CONTROL_PLANE) public class SpecActivityImpl implements SpecActivity { diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/AsyncReplicationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/AsyncReplicationActivityImpl.java index 972ff97cf60..94d9c0f38af 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/AsyncReplicationActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/AsyncReplicationActivityImpl.java @@ -147,7 +147,7 @@ public String startReplication(final ReplicationActivityInput replicationActivit ApmTraceUtils.addTagsToTrace(tracingContext.traceAttributes); - if (replicationActivityInput.getIsReset()) { + if (replicationActivityInput.isReset()) { metricClient.count(OssMetricsRegistry.RESET_REQUEST, 1); } @@ -300,6 +300,14 @@ private void traceReplicationSummary(final ReplicationAttemptSummary replication if (replicationSummary.getStatus() != null) { tags.put(REPLICATION_STATUS_KEY, replicationSummary.getStatus().value()); } + if (replicationSummary.getStartTime() != null && replicationSummary.getEndTime() != null && replicationSummary.getBytesSynced() != null) { + final var elapsedMs = replicationSummary.getEndTime() - replicationSummary.getStartTime(); + if (elapsedMs > 0) { + final var elapsedSeconds = elapsedMs / 1000; + final var throughput = replicationSummary.getBytesSynced() / elapsedSeconds; + metricClient.count(OssMetricsRegistry.REPLICATION_THROUGHPUT_BPS, throughput, metricAttributes); + } + } if (!tags.isEmpty()) { ApmTraceUtils.addTagsToTrace(tags); } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivityImpl.java 
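The new throughput metric above truncates the elapsed time to whole seconds before dividing, so a sync that finishes in under a second would (assuming the summary fields are millisecond longs) still divide by zero despite the elapsedMs > 0 guard. A guarded variant of the same computation, sketched against plain long inputs rather than the real ReplicationAttemptSummary and MetricClient types:

final class ThroughputSketch {

  private ThroughputSketch() {}

  /** Returns bytes per second, or -1 when no meaningful rate can be computed. */
  static long throughputBytesPerSecond(Long startTimeMs, Long endTimeMs, Long bytesSynced) {
    if (startTimeMs == null || endTimeMs == null || bytesSynced == null) {
      return -1L;
    }
    final long elapsedMs = endTimeMs - startTimeMs;
    if (elapsedMs <= 0) {
      return -1L;
    }
    // Work in milliseconds so sub-second syncs are not truncated to zero seconds.
    return bytesSynced * 1000L / elapsedMs;
  }

  public static void main(String[] args) {
    System.out.println(throughputBytesPerSecond(0L, 500L, 1_000_000L));    // 2000000
    System.out.println(throughputBytesPerSecond(0L, 10_000L, 1_000_000L)); // 100000
  }
}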
b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivityImpl.java index ea4788c8a1a..45e93c14bf1 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/RefreshSchemaActivityImpl.java @@ -37,19 +37,22 @@ import io.airbyte.workers.models.RefreshSchemaActivityOutput; import jakarta.inject.Singleton; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.time.OffsetDateTime; import java.util.List; import java.util.Map; import java.util.UUID; -import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Refresh schema temporal activity impl. */ -@Slf4j @Singleton public class RefreshSchemaActivityImpl implements RefreshSchemaActivity { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final AirbyteApiClient airbyteApiClient; private final FeatureFlagClient featureFlagClient; private final PayloadChecker payloadChecker; diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java index d0401a6b715..bbde0f58f44 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/ReplicationActivityImpl.java @@ -58,8 +58,9 @@ import org.slf4j.LoggerFactory; /** - * Replication temporal activity impl. + * Replication temporal activity impl. Deprecated — See AsyncReplicationActivityImpl */ +@Deprecated @Singleton @SuppressWarnings("PMD.UseVarargs") public class ReplicationActivityImpl implements ReplicationActivity { @@ -160,7 +161,7 @@ public StandardSyncOutput replicateV2(final ReplicationActivityInput replication final TracingContext tracingContext = buildTracingContext(replicationActivityInput); ApmTraceUtils.addTagsToTrace(tracingContext.traceAttributes); - if (replicationActivityInput.getIsReset()) { + if (replicationActivityInput.isReset()) { metricClient.count(OssMetricsRegistry.RESET_REQUEST, 1); } final ActivityExecutionContext context = Activity.getExecutionContext(); diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/SyncWorkflowImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/SyncWorkflowImpl.java index fc8786a4ee5..ba74d74f9e9 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/SyncWorkflowImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/sync/SyncWorkflowImpl.java @@ -49,7 +49,6 @@ import io.airbyte.workers.models.RefreshSchemaActivityOutput; import io.airbyte.workers.models.ReplicationActivityInput; import io.airbyte.workers.temporal.activities.ReportRunTimeActivityInput; -import io.airbyte.workers.temporal.activities.SyncFeatureFlagFetcherInput; import io.airbyte.workers.temporal.discover.catalog.DiscoverCatalogHelperActivity; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivity; import io.temporal.failure.ActivityFailure; @@ -114,8 +113,7 @@ public StandardSyncOutput run(final JobRunConfig jobRunConfig, final long startTime = Workflow.currentTimeMillis(); // TODO: Remove this once Workload API rolled out final var sendRunTimeMetrics = shouldReportRuntime(); - final var shouldRunAsChildWorkflow = shouldRunAsAChildWorkflow(connectionId, 
syncInput.getWorkspaceId(), - syncInput.getConnectionContext().getSourceDefinitionId(), syncInput.getIsReset()); + final var shouldRunAsChildWorkflow = true; ApmTraceUtils .addTagsToTrace(Map.of( @@ -130,13 +128,13 @@ public StandardSyncOutput run(final JobRunConfig jobRunConfig, final Optional sourceId = getSourceId(syncInput); RefreshSchemaActivityOutput refreshSchemaOutput = null; final boolean shouldRefreshSchema = sourceId.isPresent() && refreshSchemaActivity.shouldRefreshSchema(sourceId.get()); - if (sourceId.isPresent() && (shouldRefreshSchema || shouldRunAsChildWorkflow)) { + if (shouldRunAsChildWorkflow || (sourceId.isPresent() && shouldRefreshSchema)) { try { if (shouldRunAsChildWorkflow) { final JsonNode sourceConfig = configFetchActivity.getSourceConfig(sourceId.get()); final String discoverTaskQueue = TemporalTaskQueueUtils.getTaskQueue(TemporalJobType.DISCOVER_SCHEMA); refreshSchemaOutput = runDiscoverAsChildWorkflow(jobRunConfig, sourceLauncherConfig, syncInput, sourceConfig, discoverTaskQueue); - } else if (shouldRefreshSchema) { + } else { refreshSchemaOutput = refreshSchemaActivity.refreshSchemaV2(new RefreshSchemaActivityInput(sourceId.get(), connectionId, syncInput.getWorkspaceId())); } @@ -166,7 +164,7 @@ public StandardSyncOutput run(final JobRunConfig jobRunConfig, final String workloadId = asyncReplicationActivity.startReplication(replicationActivityInput); try { - shouldBlock = true; + shouldBlock = !workloadStatusCheckActivity.isTerminal(workloadId); while (shouldBlock) { Workflow.await(Duration.ofMinutes(15), () -> !shouldBlock); shouldBlock = !workloadStatusCheckActivity.isTerminal(workloadId); @@ -270,26 +268,6 @@ public RefreshSchemaActivityOutput runDiscoverAsChildWorkflow(final JobRunConfig } } - private boolean shouldRunAsAChildWorkflow(final UUID connectionId, final UUID workspaceId, final UUID sourceDefinitionId, final boolean isReset) { - final int shouldRunAsChildWorkflowVersion = Workflow.getVersion("SHOULD_RUN_AS_CHILD", Workflow.DEFAULT_VERSION, 2); - final int versionWithoutResetCheck = 1; - if (shouldRunAsChildWorkflowVersion == Workflow.DEFAULT_VERSION) { - return false; - } else if (shouldRunAsChildWorkflowVersion == versionWithoutResetCheck) { - return syncFeatureFlagFetcherActivity.shouldRunAsChildWorkflow(new SyncFeatureFlagFetcherInput( - Optional.ofNullable(connectionId).orElse(DEFAULT_UUID), - sourceDefinitionId, - workspaceId)); - } else { - return !isReset - && syncFeatureFlagFetcherActivity.shouldRunAsChildWorkflow(new SyncFeatureFlagFetcherInput( - Optional.ofNullable(connectionId).orElse(DEFAULT_UUID), - sourceDefinitionId, - workspaceId)); - } - - } - private boolean shouldReportRuntime() { final int shouldReportRuntimeVersion = Workflow.getVersion("SHOULD_REPORT_RUNTIME", Workflow.DEFAULT_VERSION, 1); @@ -326,7 +304,8 @@ private ReplicationActivityInput generateReplicationActivityInput(final Standard syncInput.getPrefix(), refreshSchemaOutput, syncInput.getConnectionContext(), - signalInput); + signalInput, + syncInput.getNetworkSecurityTokens()); } } diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/tracing/TemporalSdkInterceptor.java b/airbyte-workers/src/main/java/io/airbyte/workers/tracing/TemporalSdkInterceptor.java index 4ddc2be8d9d..906ed2751bb 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/tracing/TemporalSdkInterceptor.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/tracing/TemporalSdkInterceptor.java @@ -14,13 +14,11 @@ import java.util.ArrayList; import java.util.Collection; import 
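Both this workflow and ConnectorCommandWorkflowImpl further down now initialise shouldBlock from an isTerminal check instead of a hard-coded true, so a workload that has already finished does not incur the first await period. A stripped-down sketch of the pattern, assuming a hypothetical status-check activity stub; Workflow.await(timeout, condition) is the real Temporal SDK call:

import io.temporal.workflow.Workflow;
import java.time.Duration;

class AwaitTerminalSketch {

  // Hypothetical activity contract: returns true once the workload reached a terminal state.
  interface StatusCheckActivity {
    boolean isTerminal(String workloadId);
  }

  private final StatusCheckActivity statusCheckActivity;
  private boolean shouldBlock; // field (not a local) so the await condition can observe updates

  AwaitTerminalSketch(StatusCheckActivity statusCheckActivity) {
    this.statusCheckActivity = statusCheckActivity;
  }

  // Intended to run inside workflow code: skip waiting entirely when the workload is already
  // terminal, otherwise re-poll its status at most every 15 minutes.
  void waitUntilTerminal(String workloadId) {
    shouldBlock = !statusCheckActivity.isTerminal(workloadId);
    while (shouldBlock) {
      // Returns early if something else (e.g. a signal handler) flips shouldBlock to false,
      // otherwise after the 15-minute timeout; then the status is checked again.
      Workflow.await(Duration.ofMinutes(15), () -> !shouldBlock);
      shouldBlock = !statusCheckActivity.isTerminal(workloadId);
    }
  }
}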
java.util.Set; -import lombok.extern.slf4j.Slf4j; /** * Custom {@link TraceInterceptor} to handle Temporal SDK traces that include a non-error used to * exit Workflows. */ -@Slf4j public class TemporalSdkInterceptor implements TraceInterceptor { /** diff --git a/airbyte-workers/src/main/kotlin/io/airbyte/workers/commands/CheckCommand.kt b/airbyte-workers/src/main/kotlin/io/airbyte/workers/commands/CheckCommand.kt index 9e3964020bf..03fc9c00e5d 100644 --- a/airbyte-workers/src/main/kotlin/io/airbyte/workers/commands/CheckCommand.kt +++ b/airbyte-workers/src/main/kotlin/io/airbyte/workers/commands/CheckCommand.kt @@ -58,11 +58,13 @@ class CheckCommand( return WorkloadCreateRequest( workloadId = workloadId, labels = - listOf( + listOfNotNull( WorkloadLabel(Metadata.JOB_LABEL_KEY, jobId), WorkloadLabel(Metadata.ATTEMPT_LABEL_KEY, attemptNumber.toString()), WorkloadLabel(Metadata.WORKSPACE_LABEL_KEY, workspaceId.toString()), WorkloadLabel(Metadata.ACTOR_TYPE, input.checkConnectionInput.actorType.toString()), + // Can be null if this is the first check that gets run + input.checkConnectionInput.actorId?.let { WorkloadLabel(Metadata.ACTOR_ID_LABEL_KEY, it.toString()) }, ), workloadInput = serializedInput, logPath = logClientManager.fullLogPath(TemporalUtils.getJobRoot(workspaceRoot, jobId, attemptNumber.toLong())), diff --git a/airbyte-workers/src/main/kotlin/io/airbyte/workers/commands/DiscoverCommand.kt b/airbyte-workers/src/main/kotlin/io/airbyte/workers/commands/DiscoverCommand.kt index b9b8a8d39a1..cb2523456bd 100644 --- a/airbyte-workers/src/main/kotlin/io/airbyte/workers/commands/DiscoverCommand.kt +++ b/airbyte-workers/src/main/kotlin/io/airbyte/workers/commands/DiscoverCommand.kt @@ -15,9 +15,11 @@ import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest import io.airbyte.workload.api.client.model.generated.WorkloadLabel import io.airbyte.workload.api.client.model.generated.WorkloadPriority.Companion.decode import io.airbyte.workload.api.client.model.generated.WorkloadType +import io.micronaut.context.annotation.Property import jakarta.inject.Named import jakarta.inject.Singleton import java.nio.file.Path +import kotlin.time.Duration import kotlin.time.Duration.Companion.minutes @Singleton @@ -25,6 +27,7 @@ class DiscoverCommand( @Named("workspaceRoot") private val workspaceRoot: Path, airbyteApiClient: AirbyteApiClient, workloadClient: WorkloadClient, + @Property(name = "airbyte.worker.discover.auto-refresh-window") discoverAutoRefreshWindowMinutes: Int, private val workloadIdGenerator: WorkloadIdGenerator, private val logClientManager: LogClientManager, ) : WorkloadCommandBase( @@ -32,11 +35,38 @@ class DiscoverCommand( workloadClient = workloadClient, ) { companion object { - val DiscoverCatalogSnapDuration = 15.minutes.inWholeMilliseconds + val NOOP_DISCOVER_PLACEHOLDER_ID = "auto-refresh-disabled" } + private val discoverAutoRefreshWindow: Duration = + if (discoverAutoRefreshWindowMinutes > 0) discoverAutoRefreshWindowMinutes.minutes else Duration.INFINITE + override val name: String = "discover" + override fun start( + input: DiscoverCatalogInput, + signalPayload: String?, + ): String { + if (isAutoRefresh(input) && discoverAutoRefreshWindow == Duration.INFINITE) { + return NOOP_DISCOVER_PLACEHOLDER_ID + } + return super.start(input, signalPayload) + } + + override fun isTerminal(id: String): Boolean { + if (isNoopDiscover(id)) { + return true + } + return super.isTerminal(id) + } + + override fun cancel(id: String) { + if (isNoopDiscover(id)) { + return + } + 
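The discover auto-refresh window is injected as minutes via the airbyte.worker.discover.auto-refresh-window property (defaulting to 1440 in application.yml further down) and treated as "disabled" when non-positive by mapping it to an infinite duration; auto-refresh (non-manual) discovers are then skipped outright. A small Java sketch of that mapping and gating decision; the property name comes from the diff, the class and method names are illustrative:

import java.time.Duration;
import java.time.temporal.ChronoUnit;

final class AutoRefreshWindowSketch {

  // Sentinel meaning "auto-refresh disabled"; stands in for Kotlin's Duration.INFINITE.
  static final Duration DISABLED = ChronoUnit.FOREVER.getDuration();

  private AutoRefreshWindowSketch() {}

  static Duration windowFromMinutes(int minutes) {
    return minutes > 0 ? Duration.ofMinutes(minutes) : DISABLED;
  }

  // Auto-refresh (i.e. non-manual) discovers are skipped entirely when the window is disabled.
  static boolean shouldSkipDiscover(boolean manualDiscover, Duration window) {
    return !manualDiscover && DISABLED.equals(window);
  }

  public static void main(String[] args) {
    Duration window = windowFromMinutes(0);
    System.out.println(shouldSkipDiscover(false, window)); // true: auto-refresh disabled
    System.out.println(shouldSkipDiscover(true, window));  // false: manual discovers still run
  }
}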
super.cancel(id) + } + override fun buildWorkloadCreateRequest( input: DiscoverCatalogInput, signalPayload: String?, @@ -55,7 +85,7 @@ class DiscoverCommand( workloadIdGenerator.generateDiscoverWorkloadIdV2WithSnap( input.discoverCatalogInput.actorContext.actorId, System.currentTimeMillis(), - DiscoverCatalogSnapDuration, + discoverAutoRefreshWindow.inWholeMilliseconds, ) } @@ -72,6 +102,7 @@ class DiscoverCommand( WorkloadLabel(Metadata.ATTEMPT_LABEL_KEY, attemptNumber.toString()), WorkloadLabel(Metadata.WORKSPACE_LABEL_KEY, workspaceId.toString()), WorkloadLabel(Metadata.ACTOR_TYPE, ActorType.SOURCE.toString().toString()), + WorkloadLabel(Metadata.ACTOR_ID_LABEL_KEY, input.discoverCatalogInput.actorContext.actorId.toString()), ), workloadInput = serializedInput, logPath = logClientManager.fullLogPath(TemporalUtils.getJobRoot(workspaceRoot, jobId, attemptNumber.toLong())), @@ -82,11 +113,21 @@ class DiscoverCommand( ) } - override fun getOutput(id: String): ConnectorJobOutput = - workloadClient.getConnectorJobOutput(id) { failureReason -> + override fun getOutput(id: String): ConnectorJobOutput { + if (isNoopDiscover(id)) { + return ConnectorJobOutput() + .withOutputType(ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID) + .withDiscoverCatalogId(null) + } + return workloadClient.getConnectorJobOutput(id) { failureReason -> ConnectorJobOutput() .withOutputType(ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID) .withDiscoverCatalogId(null) .withFailureReason(failureReason) } + } + + private fun isAutoRefresh(input: DiscoverCatalogInput): Boolean = !input.discoverCatalogInput.manual + + private fun isNoopDiscover(id: String): Boolean = NOOP_DISCOVER_PLACEHOLDER_ID == id } diff --git a/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/InvokeOperationsActivityImpl.kt b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/InvokeOperationsActivityImpl.kt index 501e979f9b5..9a12b8f8999 100644 --- a/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/InvokeOperationsActivityImpl.kt +++ b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/sync/InvokeOperationsActivityImpl.kt @@ -43,7 +43,7 @@ class InvokeOperationsActivityImpl( .build().use { _ -> try { logClientManager.setJobMdc(TemporalUtils.getJobRoot(workspaceRoot, jobRunConfig.jobId, jobRunConfig.attemptId)) - LineGobbler.startSection(SECTION_NAME) + logger.info { LineGobbler.formatStartSection(SECTION_NAME) } if (CollectionUtils.isNotEmpty(operations)) { logger.info { "Invoking ${operations.size} post-replication operation(s)..." } @@ -77,7 +77,7 @@ class InvokeOperationsActivityImpl( logger.info { "No post-replication operation(s) to perform." 
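Because start() can now return NOOP_DISCOVER_PLACEHOLDER_ID instead of a real workload id, every other operation on the command has to recognise that sentinel: isTerminal reports true, cancel becomes a no-op, and getOutput returns an empty discover output. A compact Java sketch of that sentinel-id lifecycle, with a hypothetical command interface standing in for the real Kotlin base class:

interface CommandSketch {
  String start();
  boolean isTerminal(String id);
  void cancel(String id);
  String getOutput(String id);
}

class NoopAwareCommandSketch implements CommandSketch {

  static final String NOOP_ID = "auto-refresh-disabled";

  private final boolean disabled;

  NoopAwareCommandSketch(boolean disabled) {
    this.disabled = disabled;
  }

  @Override
  public String start() {
    // When the feature is disabled no workload is ever created; just hand back the sentinel.
    return disabled ? NOOP_ID : "real-workload-id"; // real workload creation elided
  }

  @Override
  public boolean isTerminal(String id) {
    // The sentinel is terminal by definition, since nothing was started for it.
    return NOOP_ID.equals(id) || checkRealWorkload(id);
  }

  @Override
  public void cancel(String id) {
    if (NOOP_ID.equals(id)) {
      return; // nothing to cancel
    }
    // cancel the real workload here
  }

  @Override
  public String getOutput(String id) {
    // An empty-but-well-formed output keeps downstream consumers oblivious to the skip.
    return NOOP_ID.equals(id) ? "" : "real output"; // real output retrieval elided
  }

  private boolean checkRealWorkload(String id) {
    return false; // placeholder for a real status check
  }
}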
} } } finally { - LineGobbler.endSection(SECTION_NAME) + logger.info { LineGobbler.formatEndSection(SECTION_NAME) } logClientManager.setJobMdc(null) } } diff --git a/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/workflows/ConnectorCommandWorkflow.kt b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/workflows/ConnectorCommandWorkflow.kt index 8b1c42419d2..b6614d8d6fe 100644 --- a/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/workflows/ConnectorCommandWorkflow.kt +++ b/airbyte-workers/src/main/kotlin/io/airbyte/workers/temporal/workflows/ConnectorCommandWorkflow.kt @@ -227,7 +227,7 @@ open class ConnectorCommandWorkflowImpl : ConnectorCommandWorkflow { activityInput = activityInput.copy(id = id) try { - shouldBlock = true + shouldBlock = !connectorCommandActivity.isCommandTerminal(activityInput) while (shouldBlock) { Workflow.await(1.minutes.toJavaDuration()) { !shouldBlock } shouldBlock = !connectorCommandActivity.isCommandTerminal(activityInput) diff --git a/airbyte-workers/src/main/resources/application.yml b/airbyte-workers/src/main/resources/application.yml index 6cbb22a2333..46fcc3432e2 100644 --- a/airbyte-workers/src/main/resources/application.yml +++ b/airbyte-workers/src/main/resources/application.yml @@ -54,6 +54,7 @@ airbyte: state: ${STORAGE_BUCKET_STATE} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT} activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD} + audit-logging: ${STORAGE_BUCKET_AUDIT_LOGGING} azure: connection-string: ${AZURE_STORAGE_CONNECTION_STRING} gcs: @@ -147,6 +148,7 @@ airbyte: jitter-amount-minutes: ${SCHEDULE_VERY_LOW_JITTER_AMOUNT_MINUTES:25} # anything greater than every 6.5 hours has up to 25 minutes of added jitter discover: enabled: ${SHOULD_RUN_DISCOVER_WORKFLOWS:true} + auto-refresh-window: ${DISCOVER_REFRESH_WINDOW_MINUTES:1440} max-workers: ${MAX_CHECK_WORKERS:5} job: error-reporting: diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/config/DataPlaneActivityInitializationMicronautTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/config/DataPlaneActivityInitializationMicronautTest.java index fc5998e557e..40de2a18fbb 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/config/DataPlaneActivityInitializationMicronautTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/config/DataPlaneActivityInitializationMicronautTest.java @@ -44,6 +44,8 @@ value = "workload") @Property(name = "airbyte.cloud.storage.bucket.activity-payload", value = "payload") +// @Property(name = "airbyte.cloud.storage.bucket.audit-logging", +// value = "audit") class DataPlaneActivityInitializationMicronautTest { // Ideally this should be broken down into different tests to get a clearer view of which bean diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java index a1145a2bf99..2544f70b72a 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/ConnectionManagerWorkflowTest.java @@ -87,6 +87,7 @@ import io.temporal.failure.ApplicationFailure; import io.temporal.testing.TestWorkflowEnvironment; import io.temporal.worker.Worker; +import java.lang.invoke.MethodHandles; import java.time.Duration; import java.util.ArrayList; import java.util.List; @@ -95,7 +96,6 @@ import java.util.UUID; import 
java.util.concurrent.TimeUnit; import java.util.stream.Stream; -import lombok.extern.slf4j.Slf4j; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; @@ -114,6 +114,8 @@ import org.mockito.ArgumentMatcher; import org.mockito.Mockito; import org.mockito.verification.VerificationMode; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Tests the core state machine of the connection manager workflow. @@ -121,12 +123,13 @@ * We've had race conditions in this in the past which is why (after addressing them) we have * repeated cases, just in case there's a regression where a race condition is added back to a test. */ -@Slf4j // Forcing SAME_THREAD execution as we seem to face the issues described in // https://github.com/mockito/mockito/wiki/FAQ#is-mockito-thread-safe @Execution(ExecutionMode.SAME_THREAD) class ConnectionManagerWorkflowTest { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final long JOB_ID = 1L; private static final int ATTEMPT_ID = 1; private static final int ATTEMPT_NO = 1; diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivityTest.java index fb947827316..09b4c2ab2d0 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/ConfigFetchActivityTest.java @@ -124,7 +124,7 @@ class TimeToWaitTest { @BeforeEach void setup() throws IOException { when(mWorkspaceApi.getWorkspaceByConnectionId(any())).thenReturn(new WorkspaceRead(UUID.randomUUID(), UUID.randomUUID(), "name", "slug", false, - UUID.randomUUID(), null, null, null, null, null, null, null, null, null, null, null, null)); + UUID.randomUUID(), null, null, null, null, null, null, null, null, null, null, null, null, null)); } @Nested @@ -429,7 +429,7 @@ void testCronSchedulingNoise() throws IOException { when(mWorkspaceApi.getWorkspaceByConnectionId(any())) .thenReturn(new WorkspaceRead(UUID.fromString("226edbc1-4a9c-4401-95a9-90435d667d9d"), UUID.randomUUID(), "name", "slug", false, - UUID.randomUUID(), null, null, null, null, null, null, null, null, null, null, null, null)); + UUID.randomUUID(), null, null, null, null, null, null, null, null, null, null, null, null, null)); configFetchActivity = new ConfigFetchActivityImpl(mAirbyteApiClient, SYNC_JOB_MAX_ATTEMPTS, @@ -465,7 +465,7 @@ void testGetMaxAttempt() { configFetchActivity = new ConfigFetchActivityImpl(mAirbyteApiClient, maxAttempt, () -> Instant.now().getEpochSecond(), mFeatureFlagClient, mScheduleJitterHelper); - Assertions.assertThat(configFetchActivity.getMaxAttempt().getMaxAttempt()) + Assertions.assertThat(configFetchActivity.getMaxAttempt().maxAttempt()) .isEqualTo(maxAttempt); } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivityTest.java index 522258d2327..6239af1f19d 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/FeatureFlagFetchActivityTest.java @@ -38,7 
+38,7 @@ void testGetFeatureFlags() { final FeatureFlagFetchActivity.FeatureFlagFetchInput input = new FeatureFlagFetchActivity.FeatureFlagFetchInput(CONNECTION_ID); final FeatureFlagFetchActivity.FeatureFlagFetchOutput output = featureFlagFetchActivity.getFeatureFlags(input); - Assertions.assertEquals(Map.of(UseAsyncActivities.INSTANCE.getKey(), false), output.getFeatureFlags()); + Assertions.assertEquals(Map.of(UseAsyncActivities.INSTANCE.getKey(), true), output.getFeatureFlags()); } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivityImplTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivityImplTest.java index d210ffd17f0..53b5920986a 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivityImplTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RecordMetricActivityImplTest.java @@ -53,7 +53,7 @@ void setup() throws IOException { when(connectionUpdaterInput.getConnectionId()).thenReturn(CONNECTION_ID); when(workspaceApi.getWorkspaceByConnectionId(new ConnectionIdRequestBody(CONNECTION_ID))) .thenReturn(new WorkspaceRead(WORKSPACE_ID, UUID.randomUUID(), "name", "slug", false, UUID.randomUUID(), null, null, null, null, null, null, - null, null, null, null, null, null)); + null, null, null, null, null, null, null)); when(workspaceApi.getWorkspaceByConnectionId(new ConnectionIdRequestBody(CONNECTION_ID_WITHOUT_WORKSPACE))) .thenThrow(new ClientException("Not Found", HttpStatus.NOT_FOUND.getCode(), null)); when(airbyteApiClient.getWorkspaceApi()).thenReturn(workspaceApi); diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RefreshSchemaActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RefreshSchemaActivityTest.java index 3dd2fd4c77e..126978f9a07 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RefreshSchemaActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RefreshSchemaActivityTest.java @@ -97,7 +97,7 @@ void setUp() throws IOException { mPayloadChecker = mock(PayloadChecker.class, withSettings().strictness(Strictness.LENIENT)); when(mWorkspaceApi.getWorkspaceByConnectionId(new ConnectionIdRequestBody(CONNECTION_ID))) .thenReturn(new WorkspaceRead(WORKSPACE_ID, UUID.randomUUID(), "name", "slug", false, UUID.randomUUID(), null, null, null, null, null, null, - null, null, null, null, null, null)); + null, null, null, null, null, null, null)); when(mSourceApi.getSource(new SourceIdRequestBody(SOURCE_ID))).thenReturn( new SourceRead(SOURCE_DEFINITION_ID, SOURCE_ID, WORKSPACE_ID, Jsons.jsonNode(Map.of()), "name", "source-name", 1L, null, null, null, null, null)); @@ -222,7 +222,7 @@ void testRefreshSchemaWithAutoPropagateFeatureFlagAsFalse() throws IOException { when(mFeatureFlagClient.boolVariation(ShouldRunRefreshSchema.INSTANCE, new Multi(expectedRefreshFeatureFlagContexts))).thenReturn(true); when(mWorkspaceApi.getWorkspaceByConnectionId(new ConnectionIdRequestBody(connectionId))) .thenReturn(new WorkspaceRead(workspaceId, UUID.randomUUID(), "name", "slug", false, UUID.randomUUID(), null, null, null, null, null, null, - null, null, null, null, null, null)); + null, null, null, null, null, null, null)); refreshSchemaActivity.refreshSchema(sourceId, connectionId); diff --git 
a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RetryStatePersistenceActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RetryStatePersistenceActivityTest.java index 208b6fc3edd..d61dc1e837a 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RetryStatePersistenceActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/RetryStatePersistenceActivityTest.java @@ -47,7 +47,7 @@ public void setup() throws Exception { mRetryStateClient = mock(RetryStateClient.class); mWorkspaceApi = mock(WorkspaceApi.class); when(mWorkspaceApi.getWorkspaceByConnectionId(any())).thenReturn(new WorkspaceRead(UUID.randomUUID(), UUID.randomUUID(), "name", "slug", false, - UUID.randomUUID(), null, null, null, null, null, null, null, null, null, null, null, null)); + UUID.randomUUID(), null, null, null, null, null, null, null, null, null, null, null, null, null)); when(mAirbyteApiClient.getWorkspaceApi()).thenReturn(mWorkspaceApi); } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/SlackConfigActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/SlackConfigActivityTest.java index a6ca27c6165..627321ec63e 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/SlackConfigActivityTest.java +++ b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/scheduling/activities/SlackConfigActivityTest.java @@ -41,7 +41,7 @@ void testFetchSlackConfigurationSlackNotificationPresent() throws IOException { SlackNotificationConfiguration config = new SlackNotificationConfiguration("webhook"); List notifications = List.of(new Notification(NotificationType.SLACK, false, true, config, null)); final WorkspaceRead workspaceRead = new WorkspaceRead(UUID.randomUUID(), UUID.randomUUID(), "name", "slug", false, UUID.randomUUID(), null, null, - null, null, null, notifications, null, null, null, null, null, null); + null, null, null, notifications, null, null, null, null, null, null, null); when(mAirbyteApiClient.getWorkspaceApi().getWorkspaceByConnectionId(requestBody)).thenReturn(workspaceRead); Assertions.assertThat("webhook").isEqualTo(slackConfigActivity.fetchSlackConfiguration(connectionId).get().getWebhook()); } @@ -53,7 +53,7 @@ void testFetchSlackConfigurationSlackNotificationNotPresent() throws IOException CustomerioNotificationConfiguration config = new CustomerioNotificationConfiguration(); List notifications = List.of(new Notification(NotificationType.CUSTOMERIO, false, true, null, config)); final WorkspaceRead workspaceRead = new WorkspaceRead(UUID.randomUUID(), UUID.randomUUID(), "name", "slug", false, UUID.randomUUID(), null, null, - null, null, null, notifications, null, null, null, null, null, null); + null, null, null, notifications, null, null, null, null, null, null, null); when(mAirbyteApiClient.getWorkspaceApi().getWorkspaceByConnectionId(requestBody)).thenReturn(workspaceRead); Assertions.assertThat(Optional.ofNullable(null)).isEqualTo(slackConfigActivity.fetchSlackConfiguration(connectionId)); } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/SyncWorkflowTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/SyncWorkflowTest.java index 157c3ce5295..a300920fb96 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/SyncWorkflowTest.java +++ 
b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/sync/SyncWorkflowTest.java @@ -4,15 +4,14 @@ package io.airbyte.workers.temporal.sync; -import static io.airbyte.workers.temporal.workflows.MockDiscoverCatalogAndAutoPropagateWorkflow.REFRESH_SCHEMA_ACTIVITY_OUTPUT; import static org.junit.Assert.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; @@ -22,7 +21,6 @@ import io.airbyte.commons.temporal.TemporalConstants; import io.airbyte.commons.temporal.TemporalJobType; import io.airbyte.commons.temporal.scheduling.SyncWorkflow; -import io.airbyte.config.ConnectionContext; import io.airbyte.config.FailureReason.FailureOrigin; import io.airbyte.config.FailureReason.FailureType; import io.airbyte.config.OperatorWebhook; @@ -39,10 +37,11 @@ import io.airbyte.micronaut.temporal.TemporalProxyHelper; import io.airbyte.persistence.job.models.IntegrationLauncherConfig; import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.models.RefreshSchemaActivityInput; -import io.airbyte.workers.models.RefreshSchemaActivityOutput; -import io.airbyte.workers.models.ReplicationActivityInput; +import io.airbyte.workers.models.PostprocessCatalogOutput; +import io.airbyte.workers.temporal.discover.catalog.DiscoverCatalogHelperActivity; +import io.airbyte.workers.temporal.discover.catalog.DiscoverCatalogHelperActivityImpl; import io.airbyte.workers.temporal.scheduling.activities.ConfigFetchActivityImpl; +import io.airbyte.workers.temporal.workflows.MockConnectorCommandWorkflow; import io.airbyte.workers.temporal.workflows.MockDiscoverCatalogAndAutoPropagateWorkflow; import io.airbyte.workers.test_utils.TestConfigHelpers; import io.micronaut.context.BeanRegistration; @@ -79,6 +78,9 @@ class SyncWorkflowTest { private TestWorkflowEnvironment testEnv; private Worker syncWorker; private WorkflowClient client; + private AsyncReplicationActivity asyncReplicationActivity; + private DiscoverCatalogHelperActivity discoverCatalogHelperActivity; + private WorkloadStatusCheckActivity workloadStatusCheckActivity; private ReplicationActivityImpl replicationActivity; private InvokeOperationsActivity invokeOperationsActivity; private RefreshSchemaActivityImpl refreshSchemaActivity; @@ -132,6 +134,9 @@ void setUp() { final Worker discoverWorker = testEnv.newWorker(TemporalJobType.DISCOVER_SCHEMA.name()); discoverWorker.registerWorkflowImplementationTypes(MockDiscoverCatalogAndAutoPropagateWorkflow.class); + final Worker connectorCommandWorker = testEnv.newWorker(TemporalJobType.SYNC.name()); + connectorCommandWorker.registerWorkflowImplementationTypes(MockConnectorCommandWorkflow.class); + syncWorker = testEnv.newWorker(SYNC_QUEUE); client = testEnv.getWorkflowClient(); @@ -145,12 +150,17 @@ void setUp() { replicationSuccessOutput = new StandardSyncOutput().withStandardSyncSummary(standardSyncSummary); replicationFailOutput = new StandardSyncOutput().withStandardSyncSummary(failedSyncSummary); replicationActivity = mock(ReplicationActivityImpl.class); + asyncReplicationActivity = mock(AsyncReplicationActivityImpl.class); + discoverCatalogHelperActivity = 
mock(DiscoverCatalogHelperActivityImpl.class); + workloadStatusCheckActivity = mock(WorkloadStatusCheckActivityImpl.class); invokeOperationsActivity = mock(InvokeOperationsActivityImpl.class); refreshSchemaActivity = mock(RefreshSchemaActivityImpl.class); configFetchActivity = mock(ConfigFetchActivityImpl.class); reportRunTimeActivity = mock(ReportRunTimeActivityImpl.class); syncFeatureFlagFetcherActivity = mock(SyncFeatureFlagFetcherActivityImpl.class); + when(discoverCatalogHelperActivity.postprocess(any())).thenReturn(PostprocessCatalogOutput.Companion.success(null)); + when(configFetchActivity.getSourceId(sync.getConnectionId())).thenReturn(Optional.of(SOURCE_ID)); when(refreshSchemaActivity.shouldRefreshSchema(SOURCE_ID)).thenReturn(true); when(configFetchActivity.getStatus(sync.getConnectionId())).thenReturn(Optional.of(ConnectionStatus.ACTIVE)); @@ -240,6 +250,9 @@ private StandardSyncOutput execute() { // bundle up all the temporal worker setup / execution into one method. private StandardSyncOutput execute(final boolean isReset) { syncWorker.registerActivitiesImplementations(replicationActivity, + asyncReplicationActivity, + discoverCatalogHelperActivity, + workloadStatusCheckActivity, invokeOperationsActivity, refreshSchemaActivity, configFetchActivity, @@ -254,59 +267,13 @@ private StandardSyncOutput execute(final boolean isReset) { @Test void testSuccess() throws Exception { - doReturn(replicationSuccessOutput).when(replicationActivity).replicateV2(any()); - - final StandardSyncOutput actualOutput = execute(); - - verifyReplication(replicationActivity, syncInput); - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); - verify(reportRunTimeActivity).reportRunTime(any()); - assertEquals( - replicationSuccessOutput.getStandardSyncSummary(), - removeRefreshTime(actualOutput.getStandardSyncSummary())); - } - - @Test - void testSuccessWithChildWorkflow() { - doReturn(replicationSuccessOutput).when(replicationActivity).replicateV2(any()); - doReturn(true).when(syncFeatureFlagFetcherActivity).shouldRunAsChildWorkflow(any()); + final String workloadId = "my-successful-workload"; + doReturn(workloadId).when(asyncReplicationActivity).startReplication(any()); + doReturn(true).when(workloadStatusCheckActivity).isTerminal(workloadId); + doReturn(replicationSuccessOutput).when(asyncReplicationActivity).getReplicationOutput(any(), eq(workloadId)); final StandardSyncOutput actualOutput = execute(); - verifyReplication(replicationActivity, syncInput, REFRESH_SCHEMA_ACTIVITY_OUTPUT); - verifyShouldRefreshSchema(refreshSchemaActivity); - verify(reportRunTimeActivity).reportRunTime(any()); - assertEquals( - replicationSuccessOutput.getStandardSyncSummary(), - removeRefreshTime(actualOutput.getStandardSyncSummary())); - } - - @Test - void testNoChildWorkflowWithReset() { - doReturn(replicationSuccessOutput).when(replicationActivity).replicateV2(any()); - doReturn(true).when(syncFeatureFlagFetcherActivity).shouldRunAsChildWorkflow(any()); - - final StandardSyncOutput actualOutput = execute(true); - - verifyReplication(replicationActivity, syncInput, null); - verifyShouldRefreshSchema(refreshSchemaActivity); - verify(reportRunTimeActivity).reportRunTime(any()); - assertEquals( - replicationSuccessOutput.getStandardSyncSummary(), - removeRefreshTime(actualOutput.getStandardSyncSummary())); - } - - @Test - void passesThroughFFCall() throws Exception { - - doReturn(replicationSuccessOutput).when(replicationActivity).replicateV2(any()); - - 
final StandardSyncOutput actualOutput = execute(); - - verifyReplication(replicationActivity, syncInput, null); - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); assertEquals( replicationSuccessOutput.getStandardSyncSummary(), removeRefreshTime(actualOutput.getStandardSyncSummary())); @@ -314,24 +281,20 @@ void passesThroughFFCall() throws Exception { @Test void testReplicationFailure() throws Exception { - doThrow(new IllegalArgumentException("induced exception")).when(replicationActivity).replicateV2(any()); + doThrow(new IllegalArgumentException("induced exception")).when(asyncReplicationActivity).startReplication(any()); assertThrows(WorkflowFailedException.class, this::execute); - - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); - verifyReplication(replicationActivity, syncInput); } @Test void testReplicationFailedGracefully() throws Exception { - doReturn(replicationFailOutput).when(replicationActivity).replicateV2(any()); + final String workloadId = "my-failed-workload"; + doReturn(workloadId).when(asyncReplicationActivity).startReplication(any()); + doReturn(true).when(workloadStatusCheckActivity).isTerminal(workloadId); + doReturn(replicationFailOutput).when(asyncReplicationActivity).getReplicationOutput(any(), eq(workloadId)); final StandardSyncOutput actualOutput = execute(); - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); - verifyReplication(replicationActivity, syncInput); assertEquals( replicationFailOutput.getStandardSyncSummary(), removeRefreshTime(actualOutput.getStandardSyncSummary())); @@ -346,21 +309,22 @@ private StandardSyncSummary removeRefreshTime(final StandardSyncSummary in) { @Test void testCancelDuringReplication() throws Exception { + final String workloadId = "my-cancelled-workload"; + doReturn(workloadId).when(asyncReplicationActivity).startReplication(any()); doAnswer(ignored -> { cancelWorkflow(); return replicationSuccessOutput; - }).when(replicationActivity).replicateV2(any()); + }).when(workloadStatusCheckActivity).isTerminal(eq(workloadId)); assertThrows(WorkflowFailedException.class, this::execute); - - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); - verifyReplication(replicationActivity, syncInput); } @Test void testWebhookOperation() { - when(replicationActivity.replicateV2(any())).thenReturn(new StandardSyncOutput()); + final String workloadId = "my-successful-workload"; + doReturn(workloadId).when(asyncReplicationActivity).startReplication(any()); + doReturn(true).when(workloadStatusCheckActivity).isTerminal(workloadId); + doReturn(new StandardSyncOutput()).when(asyncReplicationActivity).getReplicationOutput(any(), eq(workloadId)); final StandardSyncOperation webhookOperation = new StandardSyncOperation() .withOperationId(UUID.randomUUID()) .withOperatorType(OperatorType.WEBHOOK) @@ -378,12 +342,10 @@ void testWebhookOperation() { } @Test - void testSkipReplicationAfterRefreshSchema() throws Exception { + void testSkipReplicationIfConnectionDisabledBySchemaRefresh() throws Exception { when(configFetchActivity.getStatus(any())).thenReturn(Optional.of(ConnectionStatus.INACTIVE)); final StandardSyncOutput output = execute(); - verifyShouldRefreshSchema(refreshSchemaActivity); - verifyRefreshSchema(refreshSchemaActivity, sync, syncInput); - verifyNoInteractions(replicationActivity); + 
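The reworked tests drive the async path by stubbing three calls: startReplication returns a workload id, isTerminal reports it finished, and getReplicationOutput supplies the summary. A condensed Mockito sketch of that arrangement, using simplified hypothetical interfaces in place of the real activity types:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

class AsyncReplicationStubbingSketch {

  // Hypothetical slimmed-down activity contracts; the real ones use richer input/output types.
  interface AsyncReplicationActivity {
    String startReplication(Object input);
    Object getReplicationOutput(Object input, String workloadId);
  }

  interface WorkloadStatusCheckActivity {
    boolean isTerminal(String workloadId);
  }

  static void stubHappyPath(Object expectedOutput) {
    AsyncReplicationActivity asyncReplicationActivity = mock(AsyncReplicationActivity.class);
    WorkloadStatusCheckActivity workloadStatusCheckActivity = mock(WorkloadStatusCheckActivity.class);

    final String workloadId = "my-successful-workload";
    // 1. Starting replication hands back a workload id instead of a sync output.
    doReturn(workloadId).when(asyncReplicationActivity).startReplication(any());
    // 2. The status check immediately reports the workload as terminal, so the workflow
    //    never sits in its await loop during the test.
    doReturn(true).when(workloadStatusCheckActivity).isTerminal(workloadId);
    // 3. Fetching the output for that workload id yields the canned summary under test.
    doReturn(expectedOutput).when(asyncReplicationActivity).getReplicationOutput(any(), eq(workloadId));
  }
}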
verifyNoInteractions(asyncReplicationActivity); assertEquals(output.getStandardSyncSummary().getStatus(), ReplicationStatus.CANCELLED); } @@ -391,7 +353,7 @@ void testSkipReplicationAfterRefreshSchema() throws Exception { void testGetProperFailureIfRefreshFails() throws Exception { when(refreshSchemaActivity.shouldRefreshSchema(any())).thenReturn(true); doThrow(new RuntimeException()) - .when(refreshSchemaActivity).refreshSchemaV2(any()); + .when(discoverCatalogHelperActivity).postprocess(any()); final StandardSyncOutput output = execute(); assertEquals(ReplicationStatus.FAILED, output.getStandardSyncSummary().getStatus()); assertEquals(1, output.getFailures().size()); @@ -416,50 +378,4 @@ private void cancelWorkflow() { testEnv.getWorkflowService().blockingStub().requestCancelWorkflowExecution(cancelRequest); } - private static void verifyReplication(final ReplicationActivity replicationActivity, final StandardSyncInput syncInput) { - verifyReplication(replicationActivity, syncInput, null); - } - - private static void verifyReplication(final ReplicationActivity replicationActivity, - final StandardSyncInput syncInput, - final RefreshSchemaActivityOutput refreshSchemaOutput) { - verify(replicationActivity).replicateV2(new ReplicationActivityInput( - syncInput.getSourceId(), - syncInput.getDestinationId(), - syncInput.getSourceConfiguration(), - syncInput.getDestinationConfiguration(), - JOB_RUN_CONFIG, - SOURCE_LAUNCHER_CONFIG, - DESTINATION_LAUNCHER_CONFIG, - syncInput.getSyncResourceRequirements(), - syncInput.getWorkspaceId(), - syncInput.getConnectionId(), - SYNC_QUEUE, - syncInput.getIsReset(), - syncInput.getNamespaceDefinition(), - syncInput.getNamespaceFormat(), - syncInput.getPrefix(), - refreshSchemaOutput, - new ConnectionContext().withOrganizationId(ORGANIZATION_ID).withSourceDefinitionId(SOURCE_DEFINITION_ID), - null)); - } - - private static void verifyShouldRefreshSchema(final RefreshSchemaActivity refreshSchemaActivity) { - verify(refreshSchemaActivity).shouldRefreshSchema(SOURCE_ID); - } - - private static void verifyRefreshSchema(final RefreshSchemaActivity refreshSchemaActivity, - final StandardSync sync, - final StandardSyncInput syncInput) - throws Exception { - verify(refreshSchemaActivity).refreshSchemaV2(new RefreshSchemaActivityInput(SOURCE_ID, sync.getConnectionId(), syncInput.getWorkspaceId())); - } - - private static void verifyRefreshSchemaChildWorkflow(final RefreshSchemaActivity refreshSchemaActivity, - final StandardSync sync, - final StandardSyncInput syncInput) - throws Exception { - verify(refreshSchemaActivity).refreshSchemaV2(new RefreshSchemaActivityInput(SOURCE_ID, sync.getConnectionId(), syncInput.getWorkspaceId())); - } - } diff --git a/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.kt b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.kt index bcdc71f4675..67b3e9eea86 100644 --- a/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.kt +++ b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.kt @@ -109,7 +109,27 @@ class CheckConnectionActivityTest { fun `runWithWorkload missing output`() { val input = checkInput every { workloadClient.getConnectorJobOutput(WORKLOAD_ID, any()) } returns - ConnectorJobOutput().withOutputType(ConnectorJobOutput.OutputType.CHECK_CONNECTION) + ConnectorJobOutput() + 
.withOutputType(ConnectorJobOutput.OutputType.CHECK_CONNECTION) + .withCheckConnection( + StandardCheckConnectionOutput() + .withStatus(StandardCheckConnectionOutput.Status.FAILED) + .withMessage("missing output"), + ) + val output = checkConnectionActivity.runWithWorkload(input) + verify { workloadIdGenerator.generateCheckWorkloadId(ACTOR_DEFINITION_ID, JOB_ID, ATTEMPT_NUMBER_AS_INT) } + assertEquals(WORKLOAD_ID, createReqSlot.captured.workloadId) + assertEquals(WorkloadType.CHECK, createReqSlot.captured.type) + assertEquals(ConnectorJobOutput.OutputType.CHECK_CONNECTION, output.outputType) + assertEquals(StandardCheckConnectionOutput.Status.FAILED, output.checkConnection.status) + } + + @Test + fun `runWithWorkload with source ID present`() { + val input = checkInputWithActorId + every { workloadClient.getConnectorJobOutput(WORKLOAD_ID, any()) } returns + ConnectorJobOutput() + .withOutputType(ConnectorJobOutput.OutputType.CHECK_CONNECTION) .withCheckConnection( StandardCheckConnectionOutput() .withStatus(StandardCheckConnectionOutput.Status.FAILED) @@ -135,16 +155,38 @@ class CheckConnectionActivityTest { private val checkInput: CheckConnectionInput get() { - val input = CheckConnectionInput() - input.jobRunConfig = JobRunConfig().withJobId(JOB_ID).withAttemptId(ATTEMPT_NUMBER) - input.checkConnectionInput = - StandardCheckConnectionInput() - .withActorType(ActorType.SOURCE) - .withActorContext( - ActorContext().withActorDefinitionId(ACTOR_DEFINITION_ID) - .withWorkspaceId(WORKSPACE_ID), - ) - input.launcherConfig = IntegrationLauncherConfig().withConnectionId(CONNECTION_ID).withPriority(WorkloadPriority.DEFAULT) + val input = + CheckConnectionInput( + jobRunConfig = JobRunConfig().withJobId(JOB_ID).withAttemptId(ATTEMPT_NUMBER), + checkConnectionInput = + StandardCheckConnectionInput() + .withActorType(ActorType.SOURCE) + .withActorContext( + ActorContext() + .withActorDefinitionId(ACTOR_DEFINITION_ID) + .withWorkspaceId(WORKSPACE_ID), + ), + launcherConfig = IntegrationLauncherConfig().withConnectionId(CONNECTION_ID).withPriority(WorkloadPriority.DEFAULT), + ) + return input + } + + private val checkInputWithActorId: CheckConnectionInput + get() { + val input = + CheckConnectionInput( + jobRunConfig = JobRunConfig().withJobId(JOB_ID).withAttemptId(ATTEMPT_NUMBER), + checkConnectionInput = + StandardCheckConnectionInput() + .withActorType(ActorType.SOURCE) + .withActorContext( + ActorContext() + .withActorDefinitionId(ACTOR_DEFINITION_ID) + .withWorkspaceId(WORKSPACE_ID), + ) + .withActorId(UUID.randomUUID()), + launcherConfig = IntegrationLauncherConfig().withConnectionId(CONNECTION_ID).withPriority(WorkloadPriority.DEFAULT), + ) return input } } diff --git a/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt index dcc143f862f..3a9df18d6ee 100644 --- a/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt +++ b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt @@ -18,7 +18,6 @@ import io.airbyte.featureflag.WorkloadCheckFrequencyInSeconds import io.airbyte.persistence.job.models.IntegrationLauncherConfig import io.airbyte.persistence.job.models.JobRunConfig import io.airbyte.workers.commands.DiscoverCommand -import io.airbyte.workers.commands.DiscoverCommand.Companion.DiscoverCatalogSnapDuration import 
io.airbyte.workers.models.DiscoverCatalogInput import io.airbyte.workers.sync.WorkloadClient import io.airbyte.workers.workload.WorkloadIdGenerator @@ -37,8 +36,12 @@ import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.ValueSource import java.nio.file.Path import java.util.UUID +import kotlin.time.Duration.Companion.minutes +import kotlin.time.DurationUnit class DiscoverCatalogActivityTest { + private val discoverCatalogSnapDuration = 15.minutes + private val workspaceRoot: Path = Path.of("workspace-root") private val airbyteApiClient: AirbyteApiClient = mockk() private val featureFlagClient: FeatureFlagClient = spyk(TestClient()) @@ -65,6 +68,7 @@ class DiscoverCatalogActivityTest { workloadClient = workloadClient, workloadIdGenerator = workloadIdGenerator, logClientManager = logClientManager, + discoverAutoRefreshWindowMinutes = discoverCatalogSnapDuration.toInt(DurationUnit.MINUTES), ), ) discoverCatalogActivity = @@ -99,34 +103,39 @@ class DiscoverCatalogActivityTest { val workloadId = "789" val workspaceId = UUID.randomUUID() val connectionId = UUID.randomUUID() - val input = DiscoverCatalogInput() - input.jobRunConfig = - JobRunConfig() - .withJobId(jobId) - .withAttemptId(attemptNumber.toLong()) - input.discoverCatalogInput = - StandardDiscoverCatalogInput() - .withActorContext( - ActorContext() - .withWorkspaceId(workspaceId) - .withActorDefinitionId(actorDefinitionId) - .withActorId(actorId), - ) - .withManual(!runAsPartOfSync) - input.launcherConfig = - IntegrationLauncherConfig().withConnectionId( - connectionId, - ).withWorkspaceId(workspaceId).withPriority(WorkloadPriority.DEFAULT) - + val input = + DiscoverCatalogInput( + jobRunConfig = + JobRunConfig() + .withJobId(jobId) + .withAttemptId(attemptNumber.toLong()), + discoverCatalogInput = + StandardDiscoverCatalogInput() + .withActorContext( + ActorContext() + .withWorkspaceId(workspaceId) + .withActorDefinitionId(actorDefinitionId) + .withActorId(actorId), + ).withManual(!runAsPartOfSync), + launcherConfig = + IntegrationLauncherConfig() + .withConnectionId( + connectionId, + ).withWorkspaceId(workspaceId) + .withPriority(WorkloadPriority.DEFAULT), + ) if (runAsPartOfSync) { - every { workloadIdGenerator.generateDiscoverWorkloadIdV2WithSnap(eq(actorId), any(), eq(DiscoverCatalogSnapDuration)) }.returns(workloadId) + every { + workloadIdGenerator.generateDiscoverWorkloadIdV2WithSnap(eq(actorId), any(), eq(discoverCatalogSnapDuration.inWholeMilliseconds)) + }.returns(workloadId) } else { every { workloadIdGenerator.generateDiscoverWorkloadId(actorDefinitionId, jobId, attemptNumber) }.returns(workloadId) } every { discoverCommand.getGeography(connectionId, workspaceId) }.returns(Geography.AUTO) val output = - ConnectorJobOutput().withOutputType(ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID) + ConnectorJobOutput() + .withOutputType(ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID) .withDiscoverCatalogId(UUID.randomUUID()) every { workloadClient.getConnectorJobOutput(workloadId, any()) } returns output diff --git a/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/workflows/MockConnectorCommandWorkflow.kt b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/workflows/MockConnectorCommandWorkflow.kt new file mode 100644 index 00000000000..3b4a465cde4 --- /dev/null +++ b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/workflows/MockConnectorCommandWorkflow.kt @@ -0,0 +1,11 @@ +package io.airbyte.workers.temporal.workflows + +import 
io.airbyte.commons.temporal.scheduling.ConnectorCommandInput +import io.airbyte.commons.temporal.scheduling.ConnectorCommandWorkflow +import io.airbyte.config.ConnectorJobOutput + +class MockConnectorCommandWorkflow : ConnectorCommandWorkflow { + override fun run(input: ConnectorCommandInput): ConnectorJobOutput = ConnectorJobOutput() + + override fun checkTerminalStatus() { } +} diff --git a/airbyte-workers/src/test/resources/application-schedule-jitter-test.yaml b/airbyte-workers/src/test/resources/application-schedule-jitter-test.yaml index 9afd0d7cbee..50819c3ee4c 100644 --- a/airbyte-workers/src/test/resources/application-schedule-jitter-test.yaml +++ b/airbyte-workers/src/test/resources/application-schedule-jitter-test.yaml @@ -23,5 +23,6 @@ airbyte: state: ${STORAGE_BUCKET_STATE:state} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT:workload-output} activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD:activity-payload} + audit-logging: ${STORAGE_BUCKET_AUDIT_LOGGING} gcs: application-credentials: ${GOOGLE_APPLICATION_CREDENTIALS:not-blank} diff --git a/airbyte-workload-init-container/src/main/kotlin/config/ApplicationBeanFactory.kt b/airbyte-workload-init-container/src/main/kotlin/config/ApplicationBeanFactory.kt index 48b274b15c4..0fb59fb67f1 100644 --- a/airbyte-workload-init-container/src/main/kotlin/config/ApplicationBeanFactory.kt +++ b/airbyte-workload-init-container/src/main/kotlin/config/ApplicationBeanFactory.kt @@ -13,6 +13,7 @@ import io.airbyte.workers.ConnectorSecretsHydrator import io.airbyte.workers.DiscoverCatalogInputHydrator import io.airbyte.workers.ReplicationInputHydrator import io.airbyte.workers.helper.BackfillHelper +import io.airbyte.workers.helper.MapperSecretHydrationHelper import io.airbyte.workers.helper.ResumableFullRefreshStatsHelper import io.airbyte.workers.input.ReplicationInputMapper import io.micronaut.context.annotation.Factory @@ -26,6 +27,7 @@ class ApplicationBeanFactory { airbyteApiClient: AirbyteApiClient, resumableFullRefreshStatsHelper: ResumableFullRefreshStatsHelper, secretsRepositoryReader: SecretsRepositoryReader, + mapperSecretHydrationHelper: MapperSecretHydrationHelper, backfillHelper: BackfillHelper, catalogClientConverters: CatalogClientConverters, metricClient: MetricClient, @@ -36,6 +38,7 @@ class ApplicationBeanFactory { airbyteApiClient, resumableFullRefreshStatsHelper, secretsRepositoryReader, + mapperSecretHydrationHelper, backfillHelper, catalogClientConverters, mapper, diff --git a/airbyte-workload-init-container/src/main/kotlin/input/ReplicationHydrationProcessor.kt b/airbyte-workload-init-container/src/main/kotlin/input/ReplicationHydrationProcessor.kt index 56c9ad0ca8d..7a0e7bcb153 100644 --- a/airbyte-workload-init-container/src/main/kotlin/input/ReplicationHydrationProcessor.kt +++ b/airbyte-workload-init-container/src/main/kotlin/input/ReplicationHydrationProcessor.kt @@ -87,7 +87,9 @@ class ReplicationHydrationProcessor( val transformedCatalog = destinationCatalogGenerator.generateDestinationCatalog(hydrated.catalog) - sendMapperErrorMetrics(transformedCatalog, parsed.connectionId) + parsed.connectionId?.let { + sendMapperErrorMetrics(transformedCatalog, it) + } val destinationCatalog = mapper.mapCatalog(destinationCatalogGenerator.generateDestinationCatalog(hydrated.catalog).catalog) diff --git a/airbyte-workload-init-container/src/test/kotlin/input/CheckHydrationProcessorTest.kt b/airbyte-workload-init-container/src/test/kotlin/input/CheckHydrationProcessorTest.kt index 2c592f091e4..0ad5742b4f4 100644 --- 
a/airbyte-workload-init-container/src/test/kotlin/input/CheckHydrationProcessorTest.kt +++ b/airbyte-workload-init-container/src/test/kotlin/input/CheckHydrationProcessorTest.kt @@ -5,6 +5,7 @@ import io.airbyte.config.StandardCheckConnectionInput import io.airbyte.initContainer.system.FileClient import io.airbyte.metrics.lib.MetricClient import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig import io.airbyte.workers.CheckConnectionInputHydrator import io.airbyte.workers.models.CheckConnectionInput import io.airbyte.workers.models.SidecarInput @@ -58,9 +59,12 @@ class CheckHydrationProcessorTest { val input = Fixtures.workload val unhydrated = StandardCheckConnectionInput() - val parsed = CheckConnectionInput() - parsed.checkConnectionInput = unhydrated - parsed.launcherConfig = IntegrationLauncherConfig() + val parsed = + CheckConnectionInput( + jobRunConfig = JobRunConfig(), + checkConnectionInput = unhydrated, + launcherConfig = IntegrationLauncherConfig(), + ) val connectionConfiguration = Jsons.jsonNode(mapOf("key-1" to "value-1")) val hydrated = StandardCheckConnectionInput() diff --git a/airbyte-workload-init-container/src/test/kotlin/input/DiscoverHydrationProcessorTest.kt b/airbyte-workload-init-container/src/test/kotlin/input/DiscoverHydrationProcessorTest.kt index 93d85bec3fa..1d3a9c29dc8 100644 --- a/airbyte-workload-init-container/src/test/kotlin/input/DiscoverHydrationProcessorTest.kt +++ b/airbyte-workload-init-container/src/test/kotlin/input/DiscoverHydrationProcessorTest.kt @@ -5,6 +5,7 @@ import io.airbyte.config.StandardDiscoverCatalogInput import io.airbyte.initContainer.system.FileClient import io.airbyte.metrics.lib.MetricClient import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig import io.airbyte.workers.DiscoverCatalogInputHydrator import io.airbyte.workers.models.DiscoverCatalogInput import io.airbyte.workers.models.SidecarInput @@ -58,9 +59,12 @@ class DiscoverHydrationProcessorTest { val input = Fixtures.workload val unhydrated = StandardDiscoverCatalogInput() - val parsed = DiscoverCatalogInput() - parsed.discoverCatalogInput = unhydrated - parsed.launcherConfig = IntegrationLauncherConfig() + val parsed = + DiscoverCatalogInput( + jobRunConfig = JobRunConfig(), + discoverCatalogInput = unhydrated, + launcherConfig = IntegrationLauncherConfig(), + ) val connectionConfiguration = Jsons.jsonNode(mapOf("key-1" to "value-1")) val hydrated = StandardDiscoverCatalogInput() diff --git a/airbyte-workload-init-container/src/test/kotlin/input/ReplicationHydrationProcessorTest.kt b/airbyte-workload-init-container/src/test/kotlin/input/ReplicationHydrationProcessorTest.kt index 8f06260e8d7..629fdf8e5d5 100644 --- a/airbyte-workload-init-container/src/test/kotlin/input/ReplicationHydrationProcessorTest.kt +++ b/airbyte-workload-init-container/src/test/kotlin/input/ReplicationHydrationProcessorTest.kt @@ -86,8 +86,10 @@ class ReplicationHydrationProcessorTest { ), ), ) - val activityInput = ReplicationActivityInput() - activityInput.connectionId = UUID.randomUUID() + val activityInput = + ReplicationActivityInput( + connectionId = UUID.randomUUID(), + ) val hydrated = ReplicationInput() .withDestinationLauncherConfig(IntegrationLauncherConfig()) @@ -146,13 +148,12 @@ class ReplicationHydrationProcessorTest { companion object { // Validates empty or null states serialize as "{}" @JvmStatic - private fun stateMatrix(): Stream { - return 
Stream.of( + private fun stateMatrix(): Stream = + Stream.of( Arguments.of(State().withState(null), 0), Arguments.of(null, 0), Arguments.of(State().withState(Jsons.jsonNode("this is" to "nested for some reason")), 1), ) - } } object Fixtures { diff --git a/airbyte-workload-init-container/src/test/kotlin/input/SpecHydrationProcessorTest.kt b/airbyte-workload-init-container/src/test/kotlin/input/SpecHydrationProcessorTest.kt index 36fd1b58216..e6bfab0e756 100644 --- a/airbyte-workload-init-container/src/test/kotlin/input/SpecHydrationProcessorTest.kt +++ b/airbyte-workload-init-container/src/test/kotlin/input/SpecHydrationProcessorTest.kt @@ -2,6 +2,7 @@ package io.airbyte.initContainer.input import io.airbyte.initContainer.system.FileClient import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig import io.airbyte.workers.models.SidecarInput import io.airbyte.workers.models.SpecInput import io.airbyte.workers.pod.FileConstants @@ -45,8 +46,11 @@ class SpecHydrationProcessorTest { fun `parses input and writes output to expected file`() { val input = Fixtures.workload - val parsed = SpecInput() - parsed.launcherConfig = IntegrationLauncherConfig() + val parsed = + SpecInput( + jobRunConfig = JobRunConfig(), + launcherConfig = IntegrationLauncherConfig(), + ) val serializedInput = "serialized hydrated blob" diff --git a/airbyte-workload-launcher/build.gradle.kts b/airbyte-workload-launcher/build.gradle.kts index 5e037104cdf..bb55f1549be 100644 --- a/airbyte-workload-launcher/build.gradle.kts +++ b/airbyte-workload-launcher/build.gradle.kts @@ -21,6 +21,7 @@ dependencies { implementation(libs.google.cloud.storage) implementation(libs.guava) implementation(libs.kotlin.logging) + implementation(libs.micronaut.cache.caffeine) implementation(libs.micronaut.jackson.databind) implementation(libs.micronaut.jooq) implementation(libs.bundles.micronaut.kotlin) diff --git a/airbyte-workload-launcher/src/main/kotlin/config/PodFactoryBeanFactory.kt b/airbyte-workload-launcher/src/main/kotlin/config/PodFactoryBeanFactory.kt index 1bb242e7255..772c800612b 100644 --- a/airbyte-workload-launcher/src/main/kotlin/config/PodFactoryBeanFactory.kt +++ b/airbyte-workload-launcher/src/main/kotlin/config/PodFactoryBeanFactory.kt @@ -15,7 +15,6 @@ import io.fabric8.kubernetes.api.model.EnvVar import io.fabric8.kubernetes.api.model.LocalObjectReference import io.fabric8.kubernetes.api.model.Toleration import io.micronaut.context.annotation.Factory -import io.micronaut.context.annotation.Value import jakarta.inject.Named import jakarta.inject.Singleton @@ -30,7 +29,6 @@ class PodFactoryBeanFactory { @Named("checkEnvVars") connectorEnvVars: List, @Named("sideCarEnvVars") sideCarEnvVars: List, @Named("sidecarKubeContainerInfo") sidecarContainerInfo: KubeContainerInfo, - @Value("\${airbyte.worker.job.kube.serviceAccount}") serviceAccount: String?, volumeFactory: VolumeFactory, initContainerFactory: InitContainerFactory, workloadSecurityContextProvider: WorkloadSecurityContextProvider, @@ -44,7 +42,6 @@ class PodFactoryBeanFactory { connectorEnvVars, sideCarEnvVars, sidecarContainerInfo, - serviceAccount, volumeFactory, initContainerFactory, mapOf( @@ -64,7 +61,6 @@ class PodFactoryBeanFactory { @Named("discoverEnvVars") connectorEnvVars: List, @Named("sideCarEnvVars") sideCarEnvVars: List, @Named("sidecarKubeContainerInfo") sidecarContainerInfo: KubeContainerInfo, - @Value("\${airbyte.worker.job.kube.serviceAccount}") serviceAccount: String?, volumeFactory: 
VolumeFactory, initContainerFactory: InitContainerFactory, workloadSecurityContextProvider: WorkloadSecurityContextProvider, @@ -78,7 +74,6 @@ class PodFactoryBeanFactory { connectorEnvVars, sideCarEnvVars, sidecarContainerInfo, - serviceAccount, volumeFactory, initContainerFactory, mapOf( @@ -98,7 +93,6 @@ class PodFactoryBeanFactory { @Named("specEnvVars") connectorEnvVars: List, @Named("sideCarEnvVars") sideCarEnvVars: List, @Named("sidecarKubeContainerInfo") sidecarContainerInfo: KubeContainerInfo, - @Value("\${airbyte.worker.job.kube.serviceAccount}") serviceAccount: String?, volumeFactory: VolumeFactory, initContainerFactory: InitContainerFactory, workloadSecurityContextProvider: WorkloadSecurityContextProvider, @@ -112,7 +106,6 @@ class PodFactoryBeanFactory { connectorEnvVars, sideCarEnvVars, sidecarContainerInfo, - serviceAccount, volumeFactory, initContainerFactory, mapOf(), diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/KubePodClient.kt b/airbyte-workload-launcher/src/main/kotlin/pods/KubePodClient.kt index 112c237a235..580f7b5c224 100644 --- a/airbyte-workload-launcher/src/main/kotlin/pods/KubePodClient.kt +++ b/airbyte-workload-launcher/src/main/kotlin/pods/KubePodClient.kt @@ -3,8 +3,6 @@ package io.airbyte.workload.launcher.pods import com.google.common.annotations.VisibleForTesting import datadog.trace.api.Trace import io.airbyte.commons.constants.WorkerConstants.KubeConstants.FULL_POD_TIMEOUT -import io.airbyte.featureflag.Context -import io.airbyte.featureflag.FeatureFlagClient import io.airbyte.metrics.lib.ApmTraceUtils import io.airbyte.persistence.job.models.ReplicationInput import io.airbyte.workers.exception.KubeClientException @@ -44,8 +42,6 @@ class KubePodClient( @Named("checkPodFactory") private val checkPodFactory: ConnectorPodFactory, @Named("discoverPodFactory") private val discoverPodFactory: ConnectorPodFactory, @Named("specPodFactory") private val specPodFactory: ConnectorPodFactory, - private val featureFlagClient: FeatureFlagClient, - @Named("infraFlagContexts") private val contexts: List, ) { fun podsExistForAutoId(autoId: UUID): Boolean { return kubePodLauncher.podsRunning(labeler.getAutoIdLabels(autoId)) @@ -56,7 +52,15 @@ class KubePodClient( replicationInput: ReplicationInput, launcherInput: LauncherInput, ) { - val sharedLabels = labeler.getSharedLabels(launcherInput.workloadId, launcherInput.mutexKey, launcherInput.labels, launcherInput.autoId) + val sharedLabels = + labeler.getSharedLabels( + launcherInput.workloadId, + launcherInput.mutexKey, + launcherInput.labels, + launcherInput.autoId, + replicationInput.workspaceId, + replicationInput.networkSecurityTokens, + ) val kubeInput = mapper.toKubeInput(launcherInput.workloadId, replicationInput, sharedLabels) var pod = @@ -107,7 +111,15 @@ class KubePodClient( replicationInput: ReplicationInput, launcherInput: LauncherInput, ) { - val sharedLabels = labeler.getSharedLabels(launcherInput.workloadId, launcherInput.mutexKey, launcherInput.labels, launcherInput.autoId) + val sharedLabels = + labeler.getSharedLabels( + launcherInput.workloadId, + launcherInput.mutexKey, + launcherInput.labels, + launcherInput.autoId, + replicationInput.workspaceId, + replicationInput.networkSecurityTokens, + ) val kubeInput = mapper.toKubeInput(launcherInput.workloadId, replicationInput, sharedLabels) var pod = @@ -160,8 +172,9 @@ class KubePodClient( mutexKey = launcherInput.mutexKey, passThroughLabels = launcherInput.labels, autoId = launcherInput.autoId, + workspaceId = 
checkInput.launcherConfig.workspaceId, + networkSecurityTokens = checkInput.checkConnectionInput.networkSecurityTokens, ) - val kubeInput = mapper.toKubeInput(launcherInput.workloadId, checkInput, sharedLabels) launchConnectorWithSidecar(kubeInput, checkPodFactory, launcherInput.workloadType.toOperationName()) @@ -178,6 +191,8 @@ class KubePodClient( mutexKey = launcherInput.mutexKey, passThroughLabels = launcherInput.labels, autoId = launcherInput.autoId, + workspaceId = discoverCatalogInput.launcherConfig.workspaceId, + networkSecurityTokens = discoverCatalogInput.discoverCatalogInput.networkSecurityTokens, ) val kubeInput = mapper.toKubeInput(launcherInput.workloadId, discoverCatalogInput, sharedLabels) @@ -196,6 +211,8 @@ class KubePodClient( mutexKey = launcherInput.mutexKey, passThroughLabels = launcherInput.labels, autoId = launcherInput.autoId, + null, + emptyList(), ) val kubeInput = mapper.toKubeInput(launcherInput.workloadId, specInput, sharedLabels) diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/PayloadKubeInputMapper.kt b/airbyte-workload-launcher/src/main/kotlin/pods/PayloadKubeInputMapper.kt index b99cd0e1fa1..2f0bedfc853 100644 --- a/airbyte-workload-launcher/src/main/kotlin/pods/PayloadKubeInputMapper.kt +++ b/airbyte-workload-launcher/src/main/kotlin/pods/PayloadKubeInputMapper.kt @@ -42,6 +42,7 @@ class PayloadKubeInputMapper( private val labeler: PodLabeler, private val podNameGenerator: PodNameGenerator, @Value("\${airbyte.worker.job.kube.namespace}") private val namespace: String?, + @Value("\${airbyte.worker.job.kube.connector-image-registry}") private val imageRegistry: String?, @Named("orchestratorKubeContainerInfo") private val orchestratorKubeContainerInfo: KubeContainerInfo, @Named("replicationWorkerConfigs") private val replicationWorkerConfigs: WorkerConfigs, @Named("checkWorkerConfigs") private val checkWorkerConfigs: WorkerConfigs, @@ -67,11 +68,11 @@ class PayloadKubeInputMapper( val orchestratorReqs = resourceRequirementsFactory.orchestrator(input) val orchRuntimeEnvVars = runTimeEnvVarFactory.orchestratorEnvVars(input, workloadId) - val sourceImage = input.sourceLauncherConfig.dockerImage + val sourceImage = input.sourceLauncherConfig.dockerImage.withImageRegistry() val sourceReqs = resourceRequirementsFactory.replSource(input) val sourceRuntimeEnvVars = runTimeEnvVarFactory.replicationConnectorEnvVars(input.sourceLauncherConfig, sourceReqs, input.useFileTransfer) - val destinationImage = input.destinationLauncherConfig.dockerImage + val destinationImage = input.destinationLauncherConfig.dockerImage.withImageRegistry() val destinationReqs = resourceRequirementsFactory.replDestination(input) val destinationRuntimeEnvVars = runTimeEnvVarFactory.replicationConnectorEnvVars(input.destinationLauncherConfig, destinationReqs, input.useFileTransfer) @@ -128,7 +129,7 @@ class PayloadKubeInputMapper( namespace, podName, KubeContainerInfo( - input.launcherConfig.dockerImage, + input.launcherConfig.dockerImage.withImageRegistry(), checkWorkerConfigs.jobImagePullPolicy, ), ) @@ -171,7 +172,7 @@ class PayloadKubeInputMapper( namespace, podName, KubeContainerInfo( - input.launcherConfig.dockerImage, + input.launcherConfig.dockerImage.withImageRegistry(), discoverWorkerConfigs.jobImagePullPolicy, ), ) @@ -214,7 +215,7 @@ class PayloadKubeInputMapper( namespace, podName, KubeContainerInfo( - input.launcherConfig.dockerImage, + input.launcherConfig.dockerImage.withImageRegistry(), specWorkerConfigs.jobImagePullPolicy, ), ) @@ -262,6 +263,30 @@ class 
PayloadKubeInputMapper( nodeSelectorOverride.toNodeSelectorMap() } } + + // Return an image ref with the image registry prefix, if the image registry is configured. + private fun String.withImageRegistry(): String { + if (imageRegistry.isNullOrEmpty()) { + return this + } + // Custom connectors may contain a fully-qualified image registry name, e.g. my.registry.com/my/image. + // In this case, we don't want to add an additional image registry prefix. + // + // In order to detect whether the connector already has an image registry, + // we follow this code: https://github.com/distribution/distribution/blob/2461543d988979529609e8cb6fca9ca190dc48da/reference/normalize.go#L64 + // If the image contains a slash and the string before the slash contains a "." or a ":" or is "localhost" + val i = this.indexOfFirst { it == '/' } + if (i != -1) { + val before = this.slice(0..i - 1) + if (before.contains('.') || before.contains(':') || before == "localhost") { + return this + } + } + + // Ensure there's a trailing slash between the image registry and the image ref + // by stripping the slash (no-op if it doesn't exist) and adding it back. + return "${imageRegistry.trimEnd('/')}/$this" + } } data class ReplicationKubeInput( diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/factories/ConnectorPodFactory.kt b/airbyte-workload-launcher/src/main/kotlin/pods/factories/ConnectorPodFactory.kt index 88d4653102d..185285be6ca 100644 --- a/airbyte-workload-launcher/src/main/kotlin/pods/factories/ConnectorPodFactory.kt +++ b/airbyte-workload-launcher/src/main/kotlin/pods/factories/ConnectorPodFactory.kt @@ -29,7 +29,6 @@ data class ConnectorPodFactory( private val connectorEnvVars: List, private val sideCarEnvVars: List, private val sidecarContainerInfo: KubeContainerInfo, - private val serviceAccount: String?, private val volumeFactory: VolumeFactory, private val initContainerFactory: InitContainerFactory, private val connectorArgs: Map, @@ -64,7 +63,6 @@ data class ConnectorPodFactory( .endMetadata() .withNewSpec() .withSchedulerName(schedulerName) - .withServiceAccount(serviceAccount) .withAutomountServiceAccountToken(true) .withRestartPolicy("Never") .withContainers(sidecar, main) diff --git a/airbyte-workload-launcher/src/main/kotlin/pods/factories/ReplicationPodFactory.kt b/airbyte-workload-launcher/src/main/kotlin/pods/factories/ReplicationPodFactory.kt index fbc93f03cbd..4621f2cc2ae 100644 --- a/airbyte-workload-launcher/src/main/kotlin/pods/factories/ReplicationPodFactory.kt +++ b/airbyte-workload-launcher/src/main/kotlin/pods/factories/ReplicationPodFactory.kt @@ -11,7 +11,6 @@ import io.fabric8.kubernetes.api.model.Pod import io.fabric8.kubernetes.api.model.PodBuilder import io.fabric8.kubernetes.api.model.ResourceRequirements import io.fabric8.kubernetes.api.model.Toleration -import io.micronaut.context.annotation.Value import jakarta.inject.Named import jakarta.inject.Singleton import java.util.UUID @@ -23,7 +22,6 @@ data class ReplicationPodFactory( private val replContainerFactory: ReplicationContainerFactory, private val volumeFactory: VolumeFactory, private val workloadSecurityContextProvider: WorkloadSecurityContextProvider, - @Value("\${airbyte.worker.job.kube.serviceAccount}") private val serviceAccount: String?, @Named("replicationImagePullSecrets") private val imagePullSecrets: List, @Named("replicationPodTolerations") private val tolerations: List, ) { @@ -84,7 +82,6 @@ data class ReplicationPodFactory( .endMetadata() .withNewSpec() .withSchedulerName(schedulerName) - 
.withServiceAccount(serviceAccount) .withAutomountServiceAccountToken(true) .withRestartPolicy("Never") .withInitContainers(initContainer) @@ -145,7 +142,6 @@ data class ReplicationPodFactory( .endMetadata() .withNewSpec() .withSchedulerName(schedulerName) - .withServiceAccount(serviceAccount) .withAutomountServiceAccountToken(true) .withRestartPolicy("Never") .withInitContainers(initContainer) diff --git a/airbyte-workload-launcher/src/main/resources/application-cloud.yml b/airbyte-workload-launcher/src/main/resources/application-cloud.yml index c9fe268abe9..abc2069fb9e 100644 --- a/airbyte-workload-launcher/src/main/resources/application-cloud.yml +++ b/airbyte-workload-launcher/src/main/resources/application-cloud.yml @@ -12,3 +12,6 @@ micronaut: enabled: true openid: issuer: ${KEYCLOAK_INTERNAL_REALM_ISSUER:} +airbyte: + workload-launcher: + network-policy-introspection: true diff --git a/airbyte-workload-launcher/src/main/resources/application.yml b/airbyte-workload-launcher/src/main/resources/application.yml index 177dc282feb..8cc3cc18b0a 100644 --- a/airbyte-workload-launcher/src/main/resources/application.yml +++ b/airbyte-workload-launcher/src/main/resources/application.yml @@ -19,7 +19,10 @@ micronaut: step: ${MICROMETER_METRICS_STEP:PT30S} host: ${STATSD_HOST:localhost} port: ${STATSD_PORT:8125} - + caches: + network-policy-label-cache: + expire-after-write: 10m + maximum-size: 100_000 airbyte: connector: source: @@ -78,6 +81,7 @@ airbyte: high-priority-queue: parallelism: ${WORKLOAD_LAUNCHER_PARALLELISM:10} workflow-parallelism: ${WORKLOAD_LAUNCHER_WORKFLOW_PARALLELISM:10} + network-policy-introspection: false secret: persistence: ${SECRET_PERSISTENCE:TESTING_CONFIG_DB_TABLE} store: @@ -132,6 +136,7 @@ airbyte: dsn: ${JOB_ERROR_REPORTING_SENTRY_DSN} strategy: ${JOB_ERROR_REPORTING_STRATEGY:LOGGING} kube: + connector-image-registry: ${JOB_KUBE_CONNECTOR_IMAGE_REGISTRY:} init: container: image: ${WORKLOAD_INIT_IMAGE:} @@ -142,7 +147,6 @@ airbyte: image-pull-policy: ${JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_POLICY:IfNotPresent} image-pull-secret: ${JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET:} namespace: ${JOB_KUBE_NAMESPACE:default} - serviceAccount: ${JOB_KUBE_SERVICEACCOUNT:airbyte-admin} # by default, Airbyte on Kube ships with the `airbyte-admin` service account. 
If that ever changes, this default should change with it tolerations: ${JOB_KUBE_TOLERATIONS:} sidecar: container: @@ -236,6 +240,7 @@ airbyte: state: ${STORAGE_BUCKET_STATE} workload-output: ${STORAGE_BUCKET_WORKLOAD_OUTPUT} activity-payload: ${STORAGE_BUCKET_ACTIVITY_PAYLOAD} + audit-logging: ${STORAGE_BUCKET_AUDIT_LOGGING} azure: connection-string: ${AZURE_STORAGE_CONNECTION_STRING} gcs: diff --git a/airbyte-workload-launcher/src/test/kotlin/pipeline/stages/BuildInputStageTest.kt b/airbyte-workload-launcher/src/test/kotlin/pipeline/stages/BuildInputStageTest.kt index 0e84a67032a..becb7613b6f 100644 --- a/airbyte-workload-launcher/src/test/kotlin/pipeline/stages/BuildInputStageTest.kt +++ b/airbyte-workload-launcher/src/test/kotlin/pipeline/stages/BuildInputStageTest.kt @@ -40,10 +40,13 @@ class BuildInputStageTest { val destConfig = POJONode("baz") val sourceConfig1 = POJONode("hello") val destConfig1 = POJONode("world") - val input = ReplicationActivityInput() - input.connectionId = UUID.randomUUID() - input.workspaceId = UUID.randomUUID() - input.jobRunConfig = JobRunConfig().withJobId("1").withAttemptId(0L) + val input = + ReplicationActivityInput( + connectionId = UUID.randomUUID(), + workspaceId = UUID.randomUUID(), + jobRunConfig = JobRunConfig().withJobId("1").withAttemptId(0L), + ) + val mapped = ReplicationInput() .withSourceConfiguration(sourceConfig) @@ -90,17 +93,19 @@ class BuildInputStageTest { @Test fun `deserializes check input`() { val inputStr = "foo" - val checkInput = CheckConnectionInput() - checkInput.launcherConfig = IntegrationLauncherConfig().withWorkspaceId(UUID.randomUUID()) + val input = StandardCheckConnectionInput() .withActorId(UUID.randomUUID()) .withAdditionalProperty("whatever", "random value") .withActorType(ActorType.DESTINATION) + val deserialized = - checkInput.apply { - checkConnectionInput = input - } + CheckConnectionInput( + launcherConfig = IntegrationLauncherConfig().withWorkspaceId(UUID.randomUUID()), + jobRunConfig = JobRunConfig().withJobId("1").withAttemptId(0L), + checkConnectionInput = input, + ) val replicationInputMapper: ReplicationInputMapper = mockk() val deserializer: PayloadDeserializer = mockk() @@ -128,17 +133,18 @@ class BuildInputStageTest { @Test fun `deserializes discover input`() { val inputStr = "foo" - val discoverInput = DiscoverCatalogInput() - discoverInput.launcherConfig = IntegrationLauncherConfig().withWorkspaceId(UUID.randomUUID()) val input = StandardDiscoverCatalogInput() .withSourceId(UUID.randomUUID().toString()) .withConfigHash(UUID.randomUUID().toString()) .withAdditionalProperty("whatever", "random value") + val deserialized = - discoverInput.apply { - discoverCatalogInput = input - } + DiscoverCatalogInput( + discoverCatalogInput = input, + launcherConfig = IntegrationLauncherConfig().withWorkspaceId(UUID.randomUUID()), + jobRunConfig = JobRunConfig().withJobId("1").withAttemptId(0L), + ) val replicationInputMapper: ReplicationInputMapper = mockk() val deserializer: PayloadDeserializer = mockk() @@ -167,7 +173,7 @@ class BuildInputStageTest { @Test fun `deserializes spec input`() { val inputStr = "foo" - val specInput = SpecInput() + val specInput = mockk() val replicationInputMapper: ReplicationInputMapper = mockk() val deserializer: PayloadDeserializer = mockk() diff --git a/airbyte-workload-launcher/src/test/kotlin/pipeline/stages/LaunchPodStageTest.kt b/airbyte-workload-launcher/src/test/kotlin/pipeline/stages/LaunchPodStageTest.kt index 5a09b297224..c197f55cf8a 100644 --- 
a/airbyte-workload-launcher/src/test/kotlin/pipeline/stages/LaunchPodStageTest.kt +++ b/airbyte-workload-launcher/src/test/kotlin/pipeline/stages/LaunchPodStageTest.kt @@ -5,6 +5,9 @@ package io.airbyte.workload.launcher.pipeline.stages import fixtures.RecordFixtures +import io.airbyte.config.StandardCheckConnectionInput +import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig import io.airbyte.persistence.job.models.ReplicationInput import io.airbyte.workers.models.CheckConnectionInput import io.airbyte.workers.models.DiscoverCatalogInput @@ -66,7 +69,14 @@ class LaunchPodStageTest { @Test fun `launches check`() { - val checkInput = CheckConnectionInput() + val launcherConfig: IntegrationLauncherConfig = mockk() + val checkConnectionInput: StandardCheckConnectionInput = mockk() + val checkInput = + CheckConnectionInput( + jobRunConfig = JobRunConfig(), + launcherConfig = launcherConfig, + checkConnectionInput = checkConnectionInput, + ) val payload = CheckPayload(checkInput) val launcher: KubePodClient = mockk() @@ -88,7 +98,12 @@ class LaunchPodStageTest { @Test fun `launches discover`() { - val discoverInput = DiscoverCatalogInput() + val discoverInput = + DiscoverCatalogInput( + jobRunConfig = mockk(), + launcherConfig = mockk(), + discoverCatalogInput = mockk(), + ) val payload = DiscoverCatalogPayload(discoverInput) val launcher: KubePodClient = mockk() diff --git a/airbyte-workload-launcher/src/test/kotlin/pods/KubePodClientTest.kt b/airbyte-workload-launcher/src/test/kotlin/pods/KubePodClientTest.kt index f78f10cf6cf..fe05a8df49b 100644 --- a/airbyte-workload-launcher/src/test/kotlin/pods/KubePodClientTest.kt +++ b/airbyte-workload-launcher/src/test/kotlin/pods/KubePodClientTest.kt @@ -6,6 +6,8 @@ package io.airbyte.workload.launcher.pods import fixtures.RecordFixtures import io.airbyte.commons.json.Jsons +import io.airbyte.config.StandardCheckConnectionInput +import io.airbyte.config.StandardDiscoverCatalogInput import io.airbyte.config.WorkloadType import io.airbyte.featureflag.TestClient import io.airbyte.persistence.job.models.IntegrationLauncherConfig @@ -44,7 +46,6 @@ import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.api.Test import org.junit.jupiter.api.assertThrows import org.junit.jupiter.api.extension.ExtendWith -import java.lang.RuntimeException import java.util.UUID import java.util.concurrent.TimeoutException @@ -103,8 +104,6 @@ class KubePodClientTest { checkPodFactory = checkPodFactory, discoverPodFactory = discoverPodFactory, specPodFactory = specPodFactory, - featureFlagClient = featureFlagClient, - contexts = listOf(), ) replInput = @@ -127,14 +126,14 @@ class KubePodClientTest { CheckConnectionInput( JobRunConfig().withJobId("jobId").withAttemptId(1), IntegrationLauncherConfig().withDockerImage("dockerImage").withWorkspaceId(workspaceId), - null, + StandardCheckConnectionInput(), ) discoverInput = DiscoverCatalogInput( JobRunConfig().withJobId("jobId").withAttemptId(1), IntegrationLauncherConfig().withDockerImage("dockerImage").withWorkspaceId(workspaceId), - null, + StandardDiscoverCatalogInput(), ) specInput = @@ -143,7 +142,7 @@ class KubePodClientTest { IntegrationLauncherConfig().withDockerImage("dockerImage").withWorkspaceId(workspaceId), ) - every { labeler.getSharedLabels(any(), any(), any(), any()) } returns sharedLabels + every { labeler.getSharedLabels(any(), any(), any(), any(), any(), any()) } returns sharedLabels every { mapper.toKubeInput(WORKLOAD_ID, checkInput, 
sharedLabels) } returns connectorKubeInput every { mapper.toKubeInput(WORKLOAD_ID, discoverInput, sharedLabels) } returns connectorKubeInput @@ -223,8 +222,21 @@ class KubePodClientTest { every { mapper.toKubeInput(WORKLOAD_ID, replInput, any()) } returns replicationKubeInput every { replicationPodFactory.create( - any(), any(), any(), any(), any(), any(), any(), any(), - any(), any(), any(), any(), any(), any(), any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), ) } returns Pod() every { launcher.create(any()) } throws RuntimeException("bang") @@ -239,8 +251,21 @@ class KubePodClientTest { every { mapper.toKubeInput(WORKLOAD_ID, replInput, any()) } returns replicationKubeInput every { replicationPodFactory.create( - any(), any(), any(), any(), any(), any(), any(), - any(), any(), any(), any(), any(), any(), any(), any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), ) } returns pod every { launcher.waitForPodInitComplete(pod, POD_INIT_TIMEOUT_VALUE) } throws TimeoutException("bang") @@ -300,8 +325,18 @@ class KubePodClientTest { every { mapper.toKubeInput(WORKLOAD_ID, replInput, any()) } returns replicationKubeInput every { replicationPodFactory.createReset( - any(), any(), any(), any(), any(), any(), any(), any(), - any(), any(), any(), any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), ) } returns Pod() every { launcher.create(any()) } throws RuntimeException("bang") @@ -316,8 +351,18 @@ class KubePodClientTest { every { mapper.toKubeInput(WORKLOAD_ID, replInput, any()) } returns replicationKubeInput every { replicationPodFactory.createReset( - any(), any(), any(), any(), any(), any(), any(), - any(), any(), any(), any(), any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), + any(), ) } returns pod every { launcher.waitForPodInitComplete(pod, POD_INIT_TIMEOUT_VALUE) } throws TimeoutException("bang") @@ -468,10 +513,14 @@ class KubePodClientTest { "orchestrator-image", "source-image", "destination-image", - io.fabric8.kubernetes.api.model.ResourceRequirements(), - io.fabric8.kubernetes.api.model.ResourceRequirements(), - io.fabric8.kubernetes.api.model.ResourceRequirements(), - io.fabric8.kubernetes.api.model.ResourceRequirements(), + io.fabric8.kubernetes.api.model + .ResourceRequirements(), + io.fabric8.kubernetes.api.model + .ResourceRequirements(), + io.fabric8.kubernetes.api.model + .ResourceRequirements(), + io.fabric8.kubernetes.api.model + .ResourceRequirements(), emptyList(), emptyList(), emptyList(), @@ -484,8 +533,10 @@ class KubePodClientTest { mapOf("test-selector" to "val3"), KubePodInfo("test-namespace", "test-name", null), mapOf("test-annotation" to "val5"), - io.fabric8.kubernetes.api.model.ResourceRequirements(), - io.fabric8.kubernetes.api.model.ResourceRequirements(), + io.fabric8.kubernetes.api.model + .ResourceRequirements(), + io.fabric8.kubernetes.api.model + .ResourceRequirements(), listOf(EnvVar("extra-env", "val6", null)), workspaceId, ) diff --git a/airbyte-workload-launcher/src/test/kotlin/pods/PayloadKubeInputMapperTest.kt b/airbyte-workload-launcher/src/test/kotlin/pods/PayloadKubeInputMapperTest.kt index 7b4581c88e2..713781af536 100644 --- a/airbyte-workload-launcher/src/test/kotlin/pods/PayloadKubeInputMapperTest.kt +++ 
b/airbyte-workload-launcher/src/test/kotlin/pods/PayloadKubeInputMapperTest.kt @@ -2,10 +2,12 @@ package io.airbyte.workload.launcher.pods import com.fasterxml.jackson.databind.JsonNode import io.airbyte.commons.workers.config.WorkerConfigs +import io.airbyte.config.ActorContext import io.airbyte.config.ActorType import io.airbyte.config.ResourceRequirements import io.airbyte.config.StandardCheckConnectionInput import io.airbyte.config.StandardDiscoverCatalogInput +import io.airbyte.config.SyncResourceRequirements import io.airbyte.config.WorkloadPriority import io.airbyte.featureflag.ConnectorApmEnabled import io.airbyte.featureflag.ContainerOrchestratorDevImage @@ -23,6 +25,7 @@ import io.airbyte.workers.models.SpecInput import io.airbyte.workers.pod.KubeContainerInfo import io.airbyte.workers.pod.PodLabeler import io.airbyte.workers.pod.PodNameGenerator +import io.airbyte.workers.pod.PodNetworkSecurityLabeler import io.airbyte.workers.pod.ResourceConversionUtils import io.airbyte.workload.launcher.model.getActorType import io.airbyte.workload.launcher.model.getAttemptId @@ -50,6 +53,7 @@ class PayloadKubeInputMapperTest { fun `builds a kube input from a replication payload`(useCustomConnector: Boolean) { val labeler: PodLabeler = mockk() val namespace = "test-namespace" + val imageRegistry = null val podName = "a-repl-pod" val podNameGenerator: PodNameGenerator = mockk() every { podNameGenerator.getReplicationPodName(any(), any()) } returns podName @@ -76,6 +80,7 @@ class PayloadKubeInputMapperTest { labeler, podNameGenerator, namespace, + imageRegistry, containerInfo, replConfigs, checkConfigs, @@ -187,6 +192,7 @@ class PayloadKubeInputMapperTest { val labeler: PodLabeler = mockk() val namespace = "test-namespace" + val imageRegistry = null val podName = "check-pod" val podNameGenerator: PodNameGenerator = mockk() every { podNameGenerator.getCheckPodName(any(), any(), any()) } returns podName @@ -212,6 +218,7 @@ class PayloadKubeInputMapperTest { labeler, podNameGenerator, namespace, + imageRegistry, orchestratorContainerInfo, replConfigs, checkConfigs, @@ -298,6 +305,7 @@ class PayloadKubeInputMapperTest { val labeler: PodLabeler = mockk() val namespace = "test-namespace" + val imageRegistry = null val podName = "check-pod" val podNameGenerator: PodNameGenerator = mockk() every { podNameGenerator.getDiscoverPodName(any(), any(), any()) } returns podName @@ -323,6 +331,7 @@ class PayloadKubeInputMapperTest { labeler, podNameGenerator, namespace, + imageRegistry, orchestratorContainerInfo, replConfigs, checkConfigs, @@ -407,6 +416,7 @@ class PayloadKubeInputMapperTest { val labeler: PodLabeler = mockk() val namespace = "test-namespace" + val imageRegistry = null val podName = "check-pod" val podNameGenerator: PodNameGenerator = mockk() every { podNameGenerator.getSpecPodName(any(), any(), any()) } returns podName @@ -430,6 +440,7 @@ class PayloadKubeInputMapperTest { labeler, podNameGenerator, namespace, + imageRegistry, orchestratorContainerInfo, replConfigs, checkConfigs, @@ -440,9 +451,7 @@ class PayloadKubeInputMapperTest { ffClient, listOf(), ) - val input: SpecInput = mockk() - mockkStatic("io.airbyte.workload.launcher.model.SpecInputExtensionsKt") val jobId = "415" val attemptId = 7654L val imageName = "image-name" @@ -458,10 +467,13 @@ class PayloadKubeInputMapperTest { every { envVarFactory.specConnectorEnvVars(workloadId) } returns expectedEnv val jobRunConfig = mockk() + val input: SpecInput = mockk() + mockkStatic("io.airbyte.workload.launcher.model.SpecInputExtensionsKt") 
every { input.getJobId() } returns jobId every { input.getAttemptId() } returns attemptId every { input.jobRunConfig } returns jobRunConfig every { input.launcherConfig } returns launcherConfig + val resourceReqs1 = ResourceRequirements() .withCpuLimit("1") @@ -497,6 +509,162 @@ class PayloadKubeInputMapperTest { assertEquals(mapOf("node-pool" to "my-env-pool", "other" to "value"), result) } + @Test + fun `prefixes images with a custom image registry`() { + val ffClient = TestClient() + val envVarFactory: RuntimeEnvVarFactory = mockk() + val podNetworkSecurityLabeler: PodNetworkSecurityLabeler = mockk() + val labeler = PodLabeler(podNetworkSecurityLabeler) + val podNameGenerator = PodNameGenerator("test-ns") + val orchestratorContainerInfo = KubeContainerInfo("orch-img", "Always") + val reqs = ResourceRequirements() + val resourceReqFactory = ResourceRequirementsFactory(reqs, reqs, reqs, reqs, reqs) + val workerConfigs = WorkerConfigs(reqs, emptyList(), emptyMap(), Optional.empty(), emptyMap(), emptyMap(), emptyList(), "Always") + val workloadId = "workload-1" + val jobConfig = + JobRunConfig().apply { + jobId = "job-1" + attemptId = 1 + } + + every { envVarFactory.specConnectorEnvVars(any()) } returns emptyList() + every { envVarFactory.checkConnectorEnvVars(any(), any(), any()) } returns emptyList() + every { envVarFactory.discoverConnectorEnvVars(any(), any(), any()) } returns emptyList() + every { envVarFactory.orchestratorEnvVars(any(), any()) } returns emptyList() + every { envVarFactory.replicationConnectorEnvVars(any(), any(), any()) } returns emptyList() + + every { podNetworkSecurityLabeler.getLabels(any(), any()) } returns emptyMap() + + val testConfig = + IntegrationLauncherConfig().apply { + dockerImage = "test-img" + this.workspaceId = UUID.randomUUID() + isCustomConnector = false + } + + val checkConnectionInput = + StandardCheckConnectionInput().apply { + connectionConfiguration = mockk() + actorContext = ActorContext().withOrganizationId(UUID.randomUUID()) + resourceRequirements = reqs + } + + val discoverCatalogInput = + StandardDiscoverCatalogInput().apply { + actorContext = ActorContext().withOrganizationId(UUID.randomUUID()) + } + + val specInput: SpecInput = mockk() + mockkStatic("io.airbyte.workload.launcher.model.SpecInputExtensionsKt") + every { specInput.getJobId() } returns "job-1" + every { specInput.getAttemptId() } returns 1 + every { specInput.jobRunConfig } returns jobConfig + every { specInput.launcherConfig } returns testConfig + + val checkInput: CheckConnectionInput = mockk() + mockkStatic("io.airbyte.workload.launcher.model.CheckConnectionInputExtensionsKt") + every { checkInput.jobRunConfig } returns jobConfig + every { checkInput.launcherConfig } returns testConfig + every { checkInput.checkConnectionInput } returns checkConnectionInput + + val discoverInput: DiscoverCatalogInput = mockk() + mockkStatic("io.airbyte.workload.launcher.model.DiscoverCatalogInputExtensionsKt") + every { discoverInput.getJobId() } returns "job-1" + every { discoverInput.getAttemptId() } returns 1 + every { discoverInput.jobRunConfig } returns jobConfig + every { discoverInput.launcherConfig } returns testConfig + every { discoverInput.discoverCatalogInput } returns discoverCatalogInput + + val replInput: ReplicationInput = mockk() + mockkStatic("io.airbyte.workers.input.ReplicationInputExtensionsKt") + every { replInput.connectionId } returns UUID.randomUUID() + every { replInput.jobRunConfig } returns jobConfig + every { replInput.sourceLauncherConfig } returns testConfig 
+ every { replInput.destinationLauncherConfig } returns testConfig + every { replInput.syncResourceRequirements } returns SyncResourceRequirements() + every { replInput.useFileTransfer } returns false + + var mapper = + PayloadKubeInputMapper( + labeler, + podNameGenerator, + "test-ns", + "custom-image-registry", + orchestratorContainerInfo, + workerConfigs, + workerConfigs, + workerConfigs, + workerConfigs, + resourceReqFactory, + envVarFactory, + ffClient, + listOf(), + ) + + mapper.toKubeInput(workloadId, specInput, emptyMap()).also { + assertEquals("custom-image-registry/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, checkInput, emptyMap()).also { + assertEquals("custom-image-registry/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, discoverInput, emptyMap()).also { + assertEquals("custom-image-registry/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, replInput, emptyMap()).also { + assertEquals("custom-image-registry/test-img", it.sourceImage) + assertEquals("custom-image-registry/test-img", it.destinationImage) + } + + // Now test a mapper with an image registry with a trailing slash. + mapper = + PayloadKubeInputMapper( + labeler, + podNameGenerator, + "test-ns", + "custom-image-registry/", + orchestratorContainerInfo, + workerConfigs, + workerConfigs, + workerConfigs, + workerConfigs, + resourceReqFactory, + envVarFactory, + ffClient, + listOf(), + ) + mapper.toKubeInput(workloadId, specInput, emptyMap()).also { + assertEquals("custom-image-registry/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, checkInput, emptyMap()).also { + assertEquals("custom-image-registry/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, discoverInput, emptyMap()).also { + assertEquals("custom-image-registry/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, replInput, emptyMap()).also { + assertEquals("custom-image-registry/test-img", it.sourceImage) + assertEquals("custom-image-registry/test-img", it.destinationImage) + } + + // Now test that custom connectors which define a fully-qualified image (i.e. image includes registry domain) + // will not get the custom registry prefix. 
+ testConfig.dockerImage = "my.registry.com/test-img" + + mapper.toKubeInput(workloadId, specInput, emptyMap()).also { + assertEquals("my.registry.com/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, checkInput, emptyMap()).also { + assertEquals("my.registry.com/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, discoverInput, emptyMap()).also { + assertEquals("my.registry.com/test-img", it.kubePodInfo.mainContainerInfo.image) + } + mapper.toKubeInput(workloadId, replInput, emptyMap()).also { + assertEquals("my.registry.com/test-img", it.sourceImage) + assertEquals("my.registry.com/test-img", it.destinationImage) + } + } + companion object { @JvmStatic private fun replicationFlagsInputMatrix(): Stream = diff --git a/airbyte-workload-launcher/src/test/kotlin/pods/factories/ConnectorPodFactoryTest.kt b/airbyte-workload-launcher/src/test/kotlin/pods/factories/ConnectorPodFactoryTest.kt index baa4a55539f..fb729de40de 100644 --- a/airbyte-workload-launcher/src/test/kotlin/pods/factories/ConnectorPodFactoryTest.kt +++ b/airbyte-workload-launcher/src/test/kotlin/pods/factories/ConnectorPodFactoryTest.kt @@ -148,7 +148,6 @@ class ConnectorPodFactoryTest { connectorEnvVars = emptyList(), sideCarEnvVars = emptyList(), sidecarContainerInfo = KubeContainerInfo("sidecar-image", "Always"), - serviceAccount = "test-sa", volumeFactory = defaultVolumeFactory, initContainerFactory = InitContainerFactory( diff --git a/airbyte-workload-launcher/src/test/kotlin/pods/factories/ReplicationPodFactoryTest.kt b/airbyte-workload-launcher/src/test/kotlin/pods/factories/ReplicationPodFactoryTest.kt index 761757d7374..07ef510ad41 100644 --- a/airbyte-workload-launcher/src/test/kotlin/pods/factories/ReplicationPodFactoryTest.kt +++ b/airbyte-workload-launcher/src/test/kotlin/pods/factories/ReplicationPodFactoryTest.kt @@ -143,7 +143,6 @@ class ReplicationPodFactoryTest { ), volumeFactory = defaultVolumeFactory, workloadSecurityContextProvider = workloadSecurityContextProvider, - serviceAccount = "test-sa", imagePullSecrets = emptyList(), tolerations = emptyList(), ) diff --git a/airbyte-workload-launcher/src/test/kotlin/pods/factories/ResourceRequirementsFactoryTest.kt b/airbyte-workload-launcher/src/test/kotlin/pods/factories/ResourceRequirementsFactoryTest.kt index 5e367c12390..ef2f3a30d1f 100644 --- a/airbyte-workload-launcher/src/test/kotlin/pods/factories/ResourceRequirementsFactoryTest.kt +++ b/airbyte-workload-launcher/src/test/kotlin/pods/factories/ResourceRequirementsFactoryTest.kt @@ -10,6 +10,7 @@ import io.airbyte.workers.models.DiscoverCatalogInput import io.airbyte.workers.pod.ResourceConversionUtils import io.airbyte.workload.launcher.pods.factories.ResourceRequirementsFactory import io.mockk.every +import io.mockk.mockk import io.mockk.spyk import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.BeforeEach @@ -63,8 +64,7 @@ class ResourceRequirementsFactoryTest { .withSyncResourceRequirements( SyncResourceRequirements() .withSource(reqs), - ) - .withUseFileTransfer(false) + ).withUseFileTransfer(false) val result = factory.replSource(input) @@ -79,8 +79,7 @@ class ResourceRequirementsFactoryTest { .withSyncResourceRequirements( SyncResourceRequirements() .withSource(reqs), - ) - .withUseFileTransfer(true) + ).withUseFileTransfer(true) val result = factory.replSource(input) @@ -119,8 +118,8 @@ class ResourceRequirementsFactoryTest { fun `builds check connector reqs from input if provided`(reqs: 
AirbyteResourceRequirements) { val input = CheckConnectionInput( - null, - null, + mockk(), + mockk(), StandardCheckConnectionInput() .withResourceRequirements(reqs), ) @@ -136,8 +135,8 @@ class ResourceRequirementsFactoryTest { fun `builds check connector reqs from defaults if not provided`() { val input = CheckConnectionInput( - null, - null, + mockk(), + mockk(), StandardCheckConnectionInput(), ) @@ -151,8 +150,8 @@ class ResourceRequirementsFactoryTest { fun `builds discover connector reqs from input if provided`(reqs: AirbyteResourceRequirements) { val input = DiscoverCatalogInput( - null, - null, + mockk(), + mockk(), StandardDiscoverCatalogInput() .withResourceRequirements(reqs), ) @@ -168,8 +167,8 @@ class ResourceRequirementsFactoryTest { fun `builds discover connector reqs from defaults if not provided`() { val input = DiscoverCatalogInput( - null, - null, + mockk(), + mockk(), StandardDiscoverCatalogInput(), ) @@ -203,7 +202,12 @@ class ResourceRequirementsFactoryTest { @ParameterizedTest @MethodSource("nonNullRequirementsMatrix") fun `builds check init reqs from sum of connector and sidecar config`(reqs: AirbyteResourceRequirements) { - val input = CheckConnectionInput() + val input = + CheckConnectionInput( + jobRunConfig = mockk(), + launcherConfig = mockk(), + checkConnectionInput = mockk(), + ) val spy = spyk(factory) every { spy.checkConnector(input) } returns reqs @@ -217,7 +221,12 @@ class ResourceRequirementsFactoryTest { @ParameterizedTest @MethodSource("nonNullRequirementsMatrix") fun `builds discover init reqs from sum of connector and sidecar config`(reqs: AirbyteResourceRequirements) { - val input = DiscoverCatalogInput() + val input = + DiscoverCatalogInput( + jobRunConfig = mockk(), + launcherConfig = mockk(), + discoverCatalogInput = mockk(), + ) val spy = spyk(factory) every { spy.discoverConnector(input) } returns reqs diff --git a/build.gradle b/build.gradle index 065041e548b..f9bc9379e64 100644 --- a/build.gradle +++ b/build.gradle @@ -25,12 +25,12 @@ buildscript { plugins { id "base" id "com.dorongold.task-tree" version "2.1.1" - id "io.airbyte.gradle.jvm" version "0.39.0" apply false - id "io.airbyte.gradle.jvm.app" version "0.39.0" apply false - id "io.airbyte.gradle.jvm.lib" version "0.39.0" apply false - id "io.airbyte.gradle.docker" version "0.39.0" apply false - id "io.airbyte.gradle.publish" version "0.39.0" apply false - id "io.airbyte.gradle.kube-reload" version "0.39.0" apply false + id "io.airbyte.gradle.jvm" version "0.40.0" apply false + id "io.airbyte.gradle.jvm.app" version "0.40.0" apply false + id "io.airbyte.gradle.jvm.lib" version "0.40.0" apply false + id "io.airbyte.gradle.docker" version "0.40.0" apply false + id "io.airbyte.gradle.publish" version "0.40.0" apply false + id "io.airbyte.gradle.kube-reload" version "0.40.0" apply false // uncomment for testing plugin locally // id "io.airbyte.gradle.jvm" version "local-test" apply false // id "io.airbyte.gradle.jvm.app" version "local-test" apply false diff --git a/charts/airbyte-workload-api-server/templates/deployment.yaml b/charts/airbyte-workload-api-server/templates/deployment.yaml index 9eb4fcddec4..92343d52f5a 100644 --- a/charts/airbyte-workload-api-server/templates/deployment.yaml +++ b/charts/airbyte-workload-api-server/templates/deployment.yaml @@ -108,6 +108,24 @@ spec: secretKeyRef: name: {{ index .Values "workloadApi" "bearerTokenSecretName" | default (printf "%s-airbyte-secrets" .Release.Name ) }} key: {{ index .Values "workloadApi" "bearerTokenSecretKey" | default 
"WORKLOAD_API_BEARER_TOKEN" }} + - name: MICRONAUT_ENVIRONMENTS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKERS_MICRONAUT_ENVIRONMENTS + {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} + - name: AIRBYTE_API_AUTH_HEADER_NAME + value: "X-Airbyte-Auth" + - name: AIRBYTE_API_AUTH_HEADER_VALUE + value: "Internal worker" + {{- else if and (eq .Values.global.deploymentMode "oss") .Values.global.auth.enabled }} + # Self-Managed Enterprise and Community w/ auth enabled use the same auth header, just + # splitting into two separate blocks for readability. + - name: AIRBYTE_API_AUTH_HEADER_NAME + value: "X-Airbyte-Auth" + - name: AIRBYTE_API_AUTH_HEADER_VALUE + value: "Internal worker" + {{- end }} # Database {{- include "airbyte.database.envs" . | nindent 8 }} diff --git a/charts/airbyte-workload-launcher/templates/deployment.yaml b/charts/airbyte-workload-launcher/templates/deployment.yaml index ce0dfa93dbd..f354a6b71b7 100644 --- a/charts/airbyte-workload-launcher/templates/deployment.yaml +++ b/charts/airbyte-workload-launcher/templates/deployment.yaml @@ -185,6 +185,11 @@ spec: name: {{ .Release.Name }}-airbyte-env key: JOB_KUBE_TOLERATIONS {{- end }} + - name: JOB_KUBE_CONNECTOR_IMAGE_REGISTRY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_CONNECTOR_IMAGE_REGISTRY - name: CONNECTOR_SIDECAR_IMAGE valueFrom: configMapKeyRef: @@ -210,13 +215,11 @@ spec: configMapKeyRef: name: {{ .Release.Name }}-airbyte-env key: JOB_KUBE_CURL_IMAGE - {{- if $.Values.global.jobs.kube.main_container_image_pull_secret }} - name: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET valueFrom: configMapKeyRef: name: {{ .Release.Name }}-airbyte-env key: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET - {{- end }} - name: JOB_MAIN_CONTAINER_CPU_REQUEST valueFrom: configMapKeyRef: diff --git a/charts/airbyte/airbyte-pro-values.yaml b/charts/airbyte/airbyte-pro-values.yaml index 1ee67eefb46..954f73885cd 100644 --- a/charts/airbyte/airbyte-pro-values.yaml +++ b/charts/airbyte/airbyte-pro-values.yaml @@ -13,3 +13,7 @@ keycloak-setup: server: env_vars: API_AUTHORIZATION_ENABLED: "true" + +worker: + env_vars: + DISCOVER_REFRESH_WINDOW_MINUTES: "0" diff --git a/charts/airbyte/templates/env-configmap.yaml b/charts/airbyte/templates/env-configmap.yaml index 99f85ae9554..a28b65f205c 100644 --- a/charts/airbyte/templates/env-configmap.yaml +++ b/charts/airbyte/templates/env-configmap.yaml @@ -96,14 +96,20 @@ data: WORKLOAD_INIT_IMAGE: {{ include "imageUrl" (list $workloadLauncher.workloadInit.image $) }} CONNECTOR_SIDECAR_IMAGE: {{ include "imageUrl" (list $workloadLauncher.connectorSidecar.image $) }} + JOB_KUBE_CONNECTOR_IMAGE_REGISTRY: {{ $.Values.global.image.registry | quote }} JOB_MAIN_CONTAINER_CPU_LIMIT: {{ ((.Values.global.jobs.resources | default dict).limits | default dict).cpu | default "" | quote }} JOB_MAIN_CONTAINER_CPU_REQUEST: {{ ((.Values.global.jobs.resources | default dict).requests | default dict).cpu | default "" | quote }} JOB_MAIN_CONTAINER_MEMORY_LIMIT: {{ ((.Values.global.jobs.resources | default dict).limits | default dict).memory | default "" | quote }} JOB_MAIN_CONTAINER_MEMORY_REQUEST: {{ ((.Values.global.jobs.resources | default dict).requests | default dict).memory | default "" | quote }} + {{- $imagePullSecrets := (list) }} + {{- range $.Values.global.imagePullSecrets -}}{{- $imagePullSecrets = append $imagePullSecrets .name -}}{{- end }} {{- if 
$.Values.global.jobs.kube.main_container_image_pull_secret }} - JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET: {{ $.Values.global.jobs.kube.main_container_image_pull_secret }} + {{- $imagePullSecrets = append $imagePullSecrets $.Values.global.jobs.kube.main_container_image_pull_secret }} {{- end }} + + JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET: {{ join "," $imagePullSecrets }} + JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION: "0.29.15.001" JOB_KUBE_LOCAL_VOLUME_ENABLED: {{ .Values.global.jobs.local.volume.enabled | quote }} diff --git a/charts/airbyte/templates/minio.yaml b/charts/airbyte/templates/minio.yaml index a47ea4e21ce..4c6bd8cbe3d 100644 --- a/charts/airbyte/templates/minio.yaml +++ b/charts/airbyte/templates/minio.yaml @@ -53,11 +53,11 @@ spec: - containerPort: 9000 resources: requests: - memory: "1024Mi" - cpu: "200m" + memory: {{ .Values.minio.resources.requests.memory }} + cpu: {{ .Values.minio.resources.requests.cpu }} limits: - memory: "1024Mi" - cpu: "200m" + memory: {{ .Values.minio.resources.limits.memory }} + cpu: {{ .Values.minio.resources.limits.cpu }} # Mount the volume into the pod securityContext: allowPrivilegeEscalation: false diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 39d96bedf32..11115e40e72 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -237,16 +237,13 @@ webapp: # -- Security context for the container podSecurityContext: - # gid=101(nginx) - fsGroup: 101 + fsGroup: 1000 containerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # uid=101(nginx) - runAsUser: 101 - # gid=101(nginx) - runAsGroup: 101 + runAsUser: 1000 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] @@ -464,16 +461,13 @@ pod-sweeper: # -- Security context for the container podSecurityContext: - # gid=1001(anon) - fsGroup: 1001 + fsGroup: 1000 containerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # uid=1001(anon) - runAsUser: 1001 - # gid=1001(anon) - runAsGroup: 1001 + runAsUser: 1000 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] @@ -1768,6 +1762,13 @@ minio: # https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity affinity: {} + resources: + requests: + memory: 1Gi + cpu: 250m + limits: + cpu: 300m + memory: 2Gi ## @section cron parameters cron: enabled: true @@ -2054,8 +2055,7 @@ keycloak: # -- Security context for the container podSecurityContext: - # gid=0(root) - fsGroup: 0 + fsGroup: 1000 initContainers: initDb: @@ -2064,10 +2064,8 @@ keycloak: initContainerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # uid=70(postgres) - runAsUser: 70 - # gid=70(postgres) - runAsGroup: 70 + runAsUser: 1000 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] @@ -2077,10 +2075,8 @@ keycloak: containerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # uid=1000(keycloak) runAsUser: 1000 - # gid=0(root) - runAsGroup: 0 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] @@ -2113,10 +2109,8 @@ keycloak-setup: initContainerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # gid=100(curl_user) - runAsUser: 100 - # gid=101(curl_group) - runAsGroup: 101 + runAsUser: 1000 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] diff --git a/charts/helm-tests/go.mod b/charts/helm-tests/go.mod index b670f9b45a0..1f99c8a57be 100644 --- a/charts/helm-tests/go.mod +++ 
b/charts/helm-tests/go.mod @@ -3,8 +3,8 @@ module github.com/airbytehq/airbyte-platform-internal/oss/charts/helm-tests go 1.23.1 require ( - github.com/gruntwork-io/terratest v0.46.14 - github.com/stretchr/testify v1.8.4 + github.com/gruntwork-io/terratest v0.47.2 + github.com/stretchr/testify v1.9.0 gopkg.in/yaml.v3 v3.0.1 k8s.io/api v0.30.0 k8s.io/apimachinery v0.30.0 diff --git a/charts/helm-tests/go.sum b/charts/helm-tests/go.sum index 950bf57f58e..7a2bfe0f271 100644 --- a/charts/helm-tests/go.sum +++ b/charts/helm-tests/go.sum @@ -83,6 +83,8 @@ github.com/gruntwork-io/go-commons v0.8.0 h1:k/yypwrPqSeYHevLlEDmvmgQzcyTwrlZGRa github.com/gruntwork-io/go-commons v0.8.0/go.mod h1:gtp0yTtIBExIZp7vyIV9I0XQkVwiQZze678hvDXof78= github.com/gruntwork-io/terratest v0.46.14 h1:nVT2JpOPLr7KbwOSNDP0GJffljH+Yu5833cwLorxRjs= github.com/gruntwork-io/terratest v0.46.14/go.mod h1:L/IHbj195wnjfIFpZYWUhjwA3jm4O6ehO//xz7NxN8o= +github.com/gruntwork-io/terratest v0.47.2 h1:t6iWwsqJH7Gx0RwXleU/vjc+2c0JXRMdj3DxYXTBssQ= +github.com/gruntwork-io/terratest v0.47.2/go.mod h1:LnYX8BN5WxUMpDr8rtD39oToSL4CBERWSCusbJ0d/64= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-multierror v1.1.0 h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI= @@ -190,6 +192,8 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/texttheater/golang-levenshtein v1.0.1 h1:+cRNoVrfiwufQPhoMzB6N0Yf/Mqajr6t1lOv8GyGE2U= github.com/texttheater/golang-levenshtein v1.0.1/go.mod h1:PYAKrbF5sAiq9wd+H82hs7gNaen0CplQ9uvm6+enD/8= github.com/urfave/cli v1.22.2 h1:gsqYFH8bb9ekPA12kRo0hfjngWQjkJPlN9R0N78BoUo= diff --git a/charts/helm-tests/tests/basic_template_test.go b/charts/helm-tests/tests/basic_template_test.go index 628bf55a13d..68d1515b163 100644 --- a/charts/helm-tests/tests/basic_template_test.go +++ b/charts/helm-tests/tests/basic_template_test.go @@ -121,14 +121,16 @@ var commonConfigMapKeys = []string{ "DATABASE_URL", "DATA_DOCKER_MOUNT", "DB_DOCKER_MOUNT", - "FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT", - "FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST", + "FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT", + "FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST", "GOOGLE_APPLICATION_CREDENTIALS", "INTERNAL_API_HOST", "JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION", "JOB_KUBE_BUSYBOX_IMAGE", "JOB_KUBE_CURL_IMAGE", + "JOB_KUBE_CONNECTOR_IMAGE_REGISTRY", "JOB_KUBE_LOCAL_VOLUME_ENABLED", + "JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET", "JOB_MAIN_CONTAINER_CPU_LIMIT", "JOB_MAIN_CONTAINER_CPU_REQUEST", "JOB_MAIN_CONTAINER_MEMORY_LIMIT", diff --git a/charts/v2/airbyte/output.yaml b/charts/v2/airbyte/output.yaml deleted file mode 100644 index a1604c6ee2b..00000000000 --- a/charts/v2/airbyte/output.yaml +++ /dev/null @@ -1,3416 +0,0 @@ ---- -# Source: airbyte/templates/airbyte-workload-launcher/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-cron/cron-secrets.yaml -# Create secrets only for the local deployment - ---- 
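Stepping back to the env-configmap.yaml hunk earlier in this patch: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET is now always rendered, built by collecting the names in global.imagePullSecrets, appending the legacy global.jobs.kube.main_container_image_pull_secret when set, and joining the result with commas. A hedged Kotlin sketch of that join (helper name and inputs are illustrative, not actual chart or platform code):

// Illustrative only: mirrors the Helm template logic that assembles the
// comma-separated JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET value from the
// global imagePullSecrets list plus the optional legacy single-secret setting.
fun mainContainerImagePullSecrets(
  globalImagePullSecretNames: List<String>,
  legacySingleSecret: String?, // may be null or blank when unset
): String {
  val all = globalImagePullSecretNames.toMutableList()
  if (!legacySingleSecret.isNullOrBlank()) all += legacySingleSecret
  return all.joinToString(",")
}

// mainContainerImagePullSecrets(listOf("regcred-a", "regcred-b"), "legacy-secret")
//   == "regcred-a,regcred-b,legacy-secret"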
-# Source: airbyte/templates/airbyte-connector-rollout-worker/deployment.yaml - -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-connector-rollout-worker - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - strategy: - type: Recreate - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - automountServiceAccountToken: true - containers: - - name: airbyte-connector-rollout-worker-container - image: "airbyte/connector-rollout-worker:1.16.0" - imagePullPolicy: "IfNotPresent" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: INTERNAL_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: INTERNAL_API_HOST - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TRACKING_STRATEGY - - name: WEBAPP_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WEBAPP_URL - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_HOST - - name: TEMPORAL_WORKER_PORTS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_WORKER_PORTS - - name: LOG_LEVEL - value: "INFO" - - # Values from secret - - # Values from env - livenessProbe: - httpGet: - path: /health/liveness - port: heartbeat - initialDelaySeconds: 30 - periodSeconds: 10 - timeoutSeconds: 1 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /health/readiness - port: heartbeat - initialDelaySeconds: 10 - periodSeconds: 10 - timeoutSeconds: 1 - successThreshold: 1 - failureThreshold: 3 - ports: - - name: heartbeat - containerPort: 8016 # for heartbeat server - resources: - limits: {} - requests: {} - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1000 - runAsNonRoot: true - runAsUser: 1000 - seccompProfile: - type: RuntimeDefault - volumeMounts: - securityContext: - fsGroup: 1000 - volumes: - ---- -# Source: airbyte/templates/secret.yaml -apiVersion: v1 -kind: Secret -metadata: - name: ab-airbyte-secrets - annotations: - helm.sh/hook: pre-install,pre-upgrade - helm.sh/hook-weight: "-1" - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -type: Opaque -stringData: - - - DATABASE_USER: airbyte - - DATABASE_PASSWORD: airbyte - - - - - - - MINIO_ACCESS_KEY_ID: "minio" - MINIO_SECRET_ACCESS_KEY: "minio123" - - - WORKLOAD_API_BEARER_TOKEN: ".Values.workloadApi.bearerToken" - ---- -# Source: 
airbyte/templates/airbyte-connector-builder-server/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-worker/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-worker/deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-worker - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - automountServiceAccountToken: true - containers: - - name: worker-container - image: "airbyte/worker:local-2b1bbad4" - imagePullPolicy: "IfNotPresent" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: CONFIG_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONFIG_ROOT - - - name: LOG_LEVEL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: LOG_LEVEL - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TRACKING_STRATEGY - - name: WORKSPACE_DOCKER_MOUNT - value: workspace - - name: WORKSPACE_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKSPACE_ROOT - - name: LOCAL_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: LOCAL_ROOT - - name: WEBAPP_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WEBAPP_URL - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_HOST - - name: TEMPORAL_WORKER_PORTS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_WORKER_PORTS - - name: JOB_KUBE_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: JOB_KUBE_SERVICEACCOUNT - value: airbyte-admin - - name: JOB_KUBE_BUSYBOX_IMAGE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_KUBE_BUSYBOX_IMAGE - - name: JOB_KUBE_CURL_IMAGE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_KUBE_CURL_IMAGE - - name: JOB_MAIN_CONTAINER_CPU_REQUEST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_REQUEST - - name: JOB_MAIN_CONTAINER_CPU_LIMIT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_LIMIT - - name: JOB_MAIN_CONTAINER_MEMORY_REQUEST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_REQUEST - - name: JOB_MAIN_CONTAINER_MEMORY_LIMIT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_LIMIT - - name: INTERNAL_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: INTERNAL_API_HOST - - name: WORKLOAD_API_HOST - valueFrom: - 
configMapKeyRef: - name: ab-airbyte-env - key: WORKLOAD_API_HOST - - name: WORKLOAD_API_BEARER_TOKEN - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: WORKLOAD_API_BEARER_TOKEN - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: METRIC_CLIENT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: METRIC_CLIENT - - name: OTEL_COLLECTOR_ENDPOINT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: OTEL_COLLECTOR_ENDPOINT - - name: ACTIVITY_MAX_ATTEMPT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: ACTIVITY_MAX_ATTEMPT - - name: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS - - name: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS - - name: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - - name: SHOULD_RUN_NOTIFY_WORKFLOWS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SHOULD_RUN_NOTIFY_WORKFLOWS - - name: MICRONAUT_ENVIRONMENTS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKERS_MICRONAUT_ENVIRONMENTS - # SECRETS MANAGER - - name: SECRET_PERSISTENCE - value: - # Values for awsSecretManager - - # Values for Azure Key Vault - - # Values for googleSecretManager secrets - - # Values for vault secrets - - # Storage - - - name: S3_PATH_STYLE_ACCESS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: S3_PATH_STYLE_ACCESS - - name: STORAGE_TYPE - value: MINIO - - name: STORAGE_BUCKET_ACTIVITY_PAYLOAD - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_ACTIVITY_PAYLOAD - - name: STORAGE_BUCKET_LOG - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_LOG - - name: STORAGE_BUCKET_STATE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_STATE - - name: STORAGE_BUCKET_WORKLOAD_OUTPUT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_WORKLOAD_OUTPUT - - - - - - - name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: "MINIO_ACCESS_KEY_ID" - - name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: "MINIO_SECRET_ACCESS_KEY" - - name: MINIO_ENDPOINT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MINIO_ENDPOINT - - - - - - # Database - - - - name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_DB - - - name: DATABASE_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - - - name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_URL - - # Values from secret - - # Values from env - - name: CONTAINER_ORCHESTRATOR_IMAGE - value: 
"airbyte/container-orchestrator:local-3dbcdb8f" - - name: FEATURE_FLAG_CLIENT - value: "configfile" - - name: FEATURE_FLAG_PATH - value: "/etc/launchdarkly/flags.yml" - - # Values from extraEnv for more compatibility (if you want to use external secret source or other stuff) - livenessProbe: - httpGet: - path: / - port: heartbeat - initialDelaySeconds: 30 - periodSeconds: 10 - timeoutSeconds: 1 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: / - port: heartbeat - initialDelaySeconds: 10 - periodSeconds: 10 - timeoutSeconds: 1 - successThreshold: 1 - failureThreshold: 3 - ports: - - name: heartbeat - containerPort: 9000 # for heartbeat server - - containerPort: 9001 # start temporal worker port pool - - containerPort: 9002 - - containerPort: 9003 - - containerPort: 9004 - - containerPort: 9005 - - containerPort: 9006 - - containerPort: 9007 - - containerPort: 9008 - - containerPort: 9009 - - containerPort: 9010 - - containerPort: 9011 - - containerPort: 9012 - - containerPort: 9013 - - containerPort: 9014 - - containerPort: 9015 - - containerPort: 9016 - - containerPort: 9017 - - containerPort: 9018 - - containerPort: 9019 - - containerPort: 9020 - - containerPort: 9021 - - containerPort: 9022 - - containerPort: 9023 - - containerPort: 9024 - - containerPort: 9025 - - containerPort: 9026 - - containerPort: 9027 - - containerPort: 9028 - - containerPort: 9029 - - containerPort: 9030 # end temporal worker port pool - resources: - limits: {} - requests: {} - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1000 - runAsNonRoot: true - runAsUser: 1000 - seccompProfile: - type: RuntimeDefault - volumeMounts: - - mountPath: /etc/launchdarkly/flags.yml - name: flags-yaml - securityContext: - fsGroup: 1000 - volumes: - - hostPath: - path: /Users/angel/Developer/github/airbytehq/airbyte-platform-internal/oss/flags.yml - type: FileOrCreate - name: flags-yaml - ---- -# Source: airbyte/templates/airbyte-temporal/deployment.yaml ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-temporal - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - containers: - - name: airbyte-temporal - image: "temporalio/auto-setup:1.23.0" - imagePullPolicy: IfNotPresent - env: - - name: AUTO_SETUP - value: "true" - - name: DB # The DB engine to use - value: "postgresql" - - name: DYNAMIC_CONFIG_FILE_PATH - value: "config/dynamicconfig/development.yaml" - - - - name: POSTGRES_SEEDS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DB_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: POSTGRES_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: POSTGRES_PWD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - # Values from secret - - # Values from env - ports: - - containerPort: 7233 - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1000 - runAsNonRoot: true - runAsUser: 1000 - 
seccompProfile: - type: RuntimeDefault - volumeMounts: - - name: airbyte-temporal-dynamicconfig - mountPath: "/etc/temporal/config/dynamicconfig/" - resources: - limits: {} - requests: {} - securityContext: - fsGroup: 1000 - volumes: - - name: airbyte-temporal-dynamicconfig - configMap: - name: airbyte-temporal-dynamicconfig - items: - - key: development.yaml - path: development.yaml - ---- -# Source: airbyte/templates/airbyte-keycloak/service.yaml -# If Cloud or Pro/Enterprise, render the keycloak service template. - ---- -# Source: airbyte/templates/airbyte-featureflag-server/service.yaml -apiVersion: v1 -kind: Service -metadata: - name: ab-featureflag-server-svc - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: - ports: - - port: - targetPort: http - protocol: TCP - name: http - selector: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - ---- -# Source: airbyte/templates/airbyte-pod-sweeper/deployment.yaml ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-pod-sweeper - namespace: - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - namespace: -spec: - replicas: - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - annotations: - checksum/sweep-pod-script: a1a5dd31de512aa3ae61598af1bbb3a4ad2c7669e128601e6f788df3b211c4a8 - spec: - serviceAccountName: airbyte-admin - containers: - - name: airbyte-pod-sweeper - image: "bitnami/kubectl:1.28.9" - imagePullPolicy: "" - env: - - name: KUBE_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: RUNNING_TTL_MINUTES - value: "" - - name: SUCCEEDED_TTL_MINUTES - value: "10" - - name: UNSUCCESSFUL_TTL_MINUTES - value: "120" - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1001 - runAsNonRoot: true - runAsUser: 1001 - seccompProfile: - type: RuntimeDefault - volumeMounts: - - mountPath: /script/sweep-pod.sh - subPath: sweep-pod.sh - name: sweep-pod-script - - mountPath: /.kube - name: kube-config - command: ["/bin/bash", "-c", /script/sweep-pod.sh] - livenessProbe: - exec: - command: - - /bin/sh - - -ec - - grep -aq sweep-pod.sh /proc/1/cmdline - initialDelaySeconds: - periodSeconds: - timeoutSeconds: - successThreshold: - failureThreshold: - readinessProbe: - exec: - command: - - /bin/sh - - -ec - - grep -aq sweep-pod.sh /proc/1/cmdline - initialDelaySeconds: - periodSeconds: - timeoutSeconds: - successThreshold: - failureThreshold: - securityContext: - fsGroup: 1001 - volumes: - - name: kube-config - emptyDir: {} - - name: sweep-pod-script - configMap: - name: airbyte-sweep-pod-script - defaultMode: 0755 - ---- -# Source: airbyte/templates/airbyte-workload-api-server/service.yaml -apiVersion: v1 -kind: Service -metadata: - name: {{ .Release.Name }}-workload-api-server-svc - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: - ports: - - port: - targetPort: http - protocol: TCP - name: http - selector: - app.kubernetes.io/name: airbyte - 
app.kubernetes.io/instance: ab - ---- -# Source: airbyte/templates/airbyte-metrics/deployment.yaml ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-metrics - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - app.airbyte.io/fullname: ab-airbyte -spec: - replicas: 1 - selector: - matchLabels: - airbyte: metrics - strategy: - type: Recreate # Needed due to volume claims - template: - metadata: - labels: - airbyte: metrics - spec: - serviceAccountName: airbyte-admin - containers: - - name: airbyte-metrics-container - image: "airbyte/metrics-reporter:local-7082b426" - imagePullPolicy: "IfNotPresent" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: METRIC_CLIENT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: METRIC_CLIENT - - name: OTEL_COLLECTOR_ENDPOINT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: OTEL_COLLECTOR_ENDPOINT - - - - name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_DB - - - name: DATABASE_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - - - name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_URL - # Values from secret - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - resources: - limits: {} - requests: {} - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1000 - runAsNonRoot: true - runAsUser: 1000 - seccompProfile: - type: RuntimeDefault - securityContext: - fsGroup: 1000 - volumes: - ---- -# Source: airbyte/templates/airbyte-connector-rollout-worker/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-connector-builder-server/service.yaml ---- -apiVersion: v1 -kind: Service -metadata: - name: ab-airbyte-connector-builder-server-svc - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: - ports: - - port: - targetPort: http - protocol: TCP - name: http - selector: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - ---- -# Source: airbyte/templates/airbyte-db.yaml - -apiVersion: v1 -kind: Service -metadata: - name: airbyte-db-svc - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-1" - labels: - - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: ClusterIP - ports: - - port: 5432 - protocol: TCP - selector: - - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab ---- -apiVersion: apps/v1 -kind: StatefulSet 
-metadata: - name: airbyte-db - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-1" - labels: - - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - serviceName: airbyte-db-svc - selector: - matchLabels: - - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab - template: - metadata: - labels: - - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab - spec: - containers: - - name: airbyte-db-container - image: "airbyte/db:1.16.0" - env: - - name: POSTGRES_DB - value: db-airbyte - - name: POSTGRES_PASSWORD - value: airbyte - - name: POSTGRES_USER - value: airbyte - - name: PGDATA - value: /var/lib/postgresql/data/pgdata - ports: - - containerPort: 5432 - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 70 - runAsNonRoot: true - runAsUser: 70 - seccompProfile: - type: RuntimeDefault - volumeMounts: - - name: airbyte-volume-db - mountPath: /var/lib/postgresql/data - securityContext: - fsGroup: 70 - - volumeClaimTemplates: - - metadata: - name: airbyte-volume-db - spec: - accessModes: [ "ReadWriteOnce" ] - resources: - requests: - storage: 500Mi - ---- -# Source: airbyte/templates/airbyte-workload-api-server/deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-workload-api-server - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - minReadySeconds: 30 - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - strategy: - type: RollingUpdate - rollingUpdate: - maxUnavailable: 0 - maxSurge: 100% - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - containers: - - name: airbyte-workload-api-server-container - image: "airbyte/workload-api-server:1.16.0" - imagePullPolicy: "IfNotPresent" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: INTERNAL_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: INTERNAL_API_HOST - - name: AIRBYTE_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_API_HOST - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_PORT - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_HOST - - name: WORKLOAD_API_BEARER_TOKEN - valueFrom: - secretKeyRef: - name: %!s()-airbyte-secrets - key: WORKLOAD_API_BEARER_TOK - # Database - - - - name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_DB - - - name: DATABASE_USER - valueFrom: - 
secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - - - name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_URL - - # Values from secret - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - livenessProbe: - httpGet: - path: /health/liveness - port: http - initialDelaySeconds: 30 - periodSeconds: 10 - timeoutSeconds: 10 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /health/liveness - port: http - initialDelaySeconds: 10 - periodSeconds: 10 - timeoutSeconds: 10 - successThreshold: 1 - failureThreshold: 3 - - ports: - - name: http - containerPort: 8007 - protocol: TCP - resources: - limits: {} - requests: {} - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1000 - runAsNonRoot: true - runAsUser: 1000 - seccompProfile: - type: RuntimeDefault - volumeMounts: - securityContext: - fsGroup: 1000 - volumes: - ---- -# Source: airbyte/templates/airbyte-webapp/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-temporal/service.yaml ---- -apiVersion: v1 -kind: Service -metadata: - name: ab-temporal - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: ClusterIP - ports: - - port: 7233 - protocol: TCP - targetPort: 7233 - selector: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - ---- -# Source: airbyte/templates/airbyte-featureflag-server/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/tests/test-webapp.yaml ---- -apiVersion: v1 -kind: Pod -metadata: - name: "ab-test-connection" - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - annotations: - "helm.sh/hook": test -spec: - containers: - - name: wget - image: "busybox:latest" - command: ['wget'] - args: ['ab-airbyte-webapp-svc:80'] - resources: - requests: - memory: "64Mi" - cpu: "100m" - limits: - memory: "128Mi" - cpu: "200m" - restartPolicy: Never - ---- -# Source: airbyte/templates/airbyte-webapp/ingress.yaml ---- - - ---- -# Source: airbyte/templates/airbyte-temporal-ui/deployment.yaml ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-temporal-ui - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - containers: - - name: airbyte-temporal-ui - image: "temporalio/ui:2.30.1" - imagePullPolicy: - env: - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - - ports: - - name: http - containerPort: 8080 - protocol: TCP - volumeMounts: - securityContext: - fsGroup: 1000 - volumes: - ---- -# Source: airbyte/templates/serviceaccount.yaml 
-apiVersion: v1 -kind: ServiceAccount -metadata: - name: airbyte-admin - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-10" ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: airbyte-admin-role - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-5" -rules: - - apiGroups: ["*"] - resources: ["jobs", "pods", "pods/log", "pods/exec", "pods/attach", "secrets"] - verbs: ["get", "list", "watch", "create", "update", "patch", "delete"] # over-permission for now ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: airbyte-admin-binding - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-3" -roleRef: - apiGroup: "" - kind: Role - name: airbyte-admin-role -subjects: - - kind: ServiceAccount - name: airbyte-admin ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRole -metadata: - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - name: node-viewer -rules: - - apiGroups: [""] - resources: ["nodes"] - verbs: ["get", "list", "watch"] ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: ClusterRoleBinding -metadata: - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - name: airbyte-admin-admin-node-viewer -subjects: - - kind: ServiceAccount - name: airbyte-admin - namespace: ab -roleRef: - kind: ClusterRole - name: node-viewer - apiGroup: rbac.authorization.k8s.io - ---- -# Source: airbyte/templates/airbyte-webapp/deployment.yaml ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-webapp - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - containers: - - name: airbyte-webapp-container - image: "airbyte/webapp:local-271379a5" - imagePullPolicy: "IfNotPresent" - env: - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TRACKING_STRATEGY - - name: AIRBYTE_SERVER_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_SERVER_HOST - - name: KEYCLOAK_INTERNAL_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: KEYCLOAK_INTERNAL_HOST - - name: CONNECTOR_BUILDER_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONNECTOR_BUILDER_API_HOST - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: API_URL - valueFrom: - configMapKeyRef: - name: 
ab-airbyte-env - key: API_URL - - name: CONNECTOR_BUILDER_API_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONNECTOR_BUILDER_API_URL - # Values from secret - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - livenessProbe: - tcpSocket: - port: http - initialDelaySeconds: 30 - periodSeconds: 10 - timeoutSeconds: 1 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /index.html - port: http - initialDelaySeconds: 10 - periodSeconds: 10 - timeoutSeconds: 1 - successThreshold: 1 - failureThreshold: 3 - ports: - - name: http - containerPort: 8080 - protocol: TCP - resources: - limits: {} - requests: {} - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 101 - runAsNonRoot: true - runAsUser: 101 - seccompProfile: - type: RuntimeDefault - volumeMounts: - securityContext: - fsGroup: 101 - volumes: - ---- -# Source: airbyte/templates/airbyte-server/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-featureflag-server/deployment.yaml - -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-featureflag-server - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - minReadySeconds: 30 - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - strategy: - type: RollingUpdate - rollingUpdate: - maxUnavailable: 0 - maxSurge: 100% - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - containers: - - name: airbyte-featureflag-server-container - image: "airbyte/featureflag-server:1.16.0" - imagePullPolicy: "" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_PORT - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - livenessProbe: - httpGet: - path: /health/liveness - port: http - initialDelaySeconds: - periodSeconds: - timeoutSeconds: - successThreshold: - failureThreshold: - readinessProbe: - httpGet: - path: /health/liveness - port: http - initialDelaySeconds: - periodSeconds: - timeoutSeconds: - successThreshold: - failureThreshold: - - ports: - - name: http - containerPort: 8007 - protocol: TCP - volumeMounts: - securityContext: - fsGroup: 1000 - volumes: - ---- -# Source: airbyte/templates/airbyte-cron/deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-cron - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - 
template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - automountServiceAccountToken: true - containers: - - name: airbyte-cron - image: "airbyte/cron:1.16.0" - imagePullPolicy: "IfNotPresent" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: MICRONAUT_ENVIRONMENTS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CRON_MICRONAUT_ENVIRONMENTS - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_HOST - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TRACKING_STRATEGY - - name: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - - name: WORKLOAD_API_BEARER_TOKEN - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: WORKLOAD_API_BEARER_TOKEN - - name: WORKLOAD_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKLOAD_API_HOST - - name: WORKSPACE_DOCKER_MOUNT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKSPACE_DOCKER_MOUNT - - name: WORKSPACE_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKSPACE_ROOT - - # Database - - - - name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_DB - - - name: DATABASE_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - - - name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_URL - - - # Values from secret - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1000 - runAsNonRoot: true - runAsUser: 1000 - seccompProfile: - type: RuntimeDefault - resources: - limits: {} - requests: {} - securityContext: - fsGroup: 1000 - ---- -# Source: airbyte/templates/airbyte-workload-launcher/deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-workload-launcher - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - selector: - matchLabels: - 
app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - strategy: - type: Recreate - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - automountServiceAccountToken: true - containers: - - name: airbyte-workload-launcher-container - image: "airbyte/workload-launcher:1.16.0" - imagePullPolicy: "" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: DATA_PLANE_ID - value: "local" - - name: PUB_SUB_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: PUB_SUB_ENABLED - - name: PUB_SUB_TOPIC_NAME - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: PUB_SUB_TOPIC_NAME - - name: CONFIG_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONFIG_ROOT - - - - name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_DB - - - name: DATABASE_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - - - name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_URL - - - name: LOG_LEVEL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: LOG_LEVEL - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TRACKING_STRATEGY - - name: WORKSPACE_DOCKER_MOUNT - value: workspace - - name: WORKSPACE_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKSPACE_ROOT - - name: LOCAL_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: LOCAL_ROOT - - name: WEBAPP_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WEBAPP_URL - - name: WORKLOAD_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKLOAD_API_HOST - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_HOST - - name: TEMPORAL_WORKER_PORTS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_WORKER_PORTS - - name: JOB_KUBE_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: JOB_KUBE_SERVICEACCOUNT - value: airbyte-admin - - name: CONNECTOR_SIDECAR_IMAGE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONNECTOR_SIDECAR_IMAGE - - name: WORKLOAD_INIT_IMAGE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKLOAD_INIT_IMAGE - - name: CONTAINER_ORCHESTRATOR_IMAGE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONTAINER_ORCHESTRATOR_IMAGE - - name: JOB_KUBE_BUSYBOX_IMAGE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_KUBE_BUSYBOX_IMAGE - - name: 
JOB_KUBE_CURL_IMAGE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_KUBE_CURL_IMAGE - - name: JOB_MAIN_CONTAINER_CPU_REQUEST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_REQUEST - - name: JOB_MAIN_CONTAINER_CPU_LIMIT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_LIMIT - - name: JOB_MAIN_CONTAINER_MEMORY_REQUEST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_REQUEST - - name: JOB_MAIN_CONTAINER_MEMORY_LIMIT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_LIMIT - - name: KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS - - name: WORKLOAD_API_BEARER_TOKEN - valueFrom: - secretKeyRef: - name: %!s()-airbyte-secrets - key: WORKLOAD_API_BEARER_TOKE - - name: WORKLOAD_API_BEARER_TOKEN_SECRET_NAME - value: %!s()-airbyte-secrets - - name: WORKLOAD_API_BEARER_TOKEN_SECRET_KEY - value: WORKLOAD_API_BEARER_TOKE - - name: INTERNAL_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: INTERNAL_API_HOST - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: METRIC_CLIENT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: METRIC_CLIENT - - name: OTEL_COLLECTOR_ENDPOINT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: OTEL_COLLECTOR_ENDPOINT - - name: ACTIVITY_MAX_ATTEMPT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: ACTIVITY_MAX_ATTEMPT - - name: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS - - name: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS - - name: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - - name: SHOULD_RUN_NOTIFY_WORKFLOWS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SHOULD_RUN_NOTIFY_WORKFLOWS - - name: MICRONAUT_ENVIRONMENTS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: LAUNCHER_MICRONAUT_ENVIRONMENTS - - name: WORKLOAD_LAUNCHER_PARALLELISM - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKLOAD_LAUNCHER_PARALLELISM - - name: FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT - - name: FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST - - # SECRETS MANAGER - - name: SECRET_PERSISTENCE - value: - # Values for AwsSecretsManager - - # Values for Azure Key Vault - - # Values for googleSecretManager secrets - - # Values for vault secrets - - # Storage - - - name: S3_PATH_STYLE_ACCESS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: S3_PATH_STYLE_ACCESS - - name: STORAGE_TYPE - value: MINIO - - name: STORAGE_BUCKET_ACTIVITY_PAYLOAD - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_ACTIVITY_PAYLOAD - - name: 
STORAGE_BUCKET_LOG - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_LOG - - name: STORAGE_BUCKET_STATE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_STATE - - name: STORAGE_BUCKET_WORKLOAD_OUTPUT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_WORKLOAD_OUTPUT - - - - - - - name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: "MINIO_ACCESS_KEY_ID" - - name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: "MINIO_SECRET_ACCESS_KEY" - - name: MINIO_ENDPOINT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MINIO_ENDPOINT - - - - - - # Values from secret - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - livenessProbe: - httpGet: - path: /health/liveness - port: heartbeat - initialDelaySeconds: - periodSeconds: - timeoutSeconds: - successThreshold: - failureThreshold: - readinessProbe: - httpGet: - path: /health/readiness - port: heartbeat - initialDelaySeconds: - periodSeconds: - timeoutSeconds: - successThreshold: - failureThreshold: - ports: - - name: heartbeat - containerPort: 8016 # for heartbeat server - volumeMounts: - securityContext: - fsGroup: 1000 - volumes: - ---- -# Source: airbyte/templates/airbyte-server/service.yaml ---- -apiVersion: v1 -kind: Service -metadata: - name: ab-airbyte-server-svc - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: ClusterIP - ports: - - port: 8001 - targetPort: http - protocol: TCP - name: http - selector: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - ---- -# Source: airbyte/templates/airbyte-connector-builder-server/deployment.yaml ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-connector-builder-server - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - strategy: - type: - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - containers: - - name: connector-builder-server - image: "airbyte/connector-builder-server:1.16.0" - imagePullPolicy: "IfNotPresent" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TRACKING_STRATEGY - - # Values from secret - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - 
livenessProbe: - httpGet: - path: /v1/health - port: http - initialDelaySeconds: 30 - periodSeconds: 10 - timeoutSeconds: 10 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /v1/health - port: http - initialDelaySeconds: 10 - periodSeconds: 10 - timeoutSeconds: 10 - successThreshold: 1 - failureThreshold: 3 - - ports: - - name: http - containerPort: 8080 - protocol: TCP - resources: - limits: {} - requests: {} - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1000 - runAsNonRoot: true - runAsUser: 1000 - seccompProfile: - type: RuntimeDefault - volumeMounts: - securityContext: - fsGroup: 1000 - volumes: - ---- -# Source: airbyte/templates/env-configmap.yaml - # default to empty dict if airbyteYml is undefined -apiVersion: v1 -kind: ConfigMap -metadata: - name: ab-airbyte-env - annotations: - helm.sh/hook: pre-install,pre-upgrade - helm.sh/hook-weight: "-1" - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -data: - AIRBYTE_VERSION: 1.16.0 - AIRBYTE_EDITION: "community" - AIRBYTE_URL: "" - - AIRBYTE_SERVER_HOST: ab-airbyte-server-svc:8001 - API_URL: /api/v1/ - CONNECTOR_BUILDER_API_URL: "/connector-builder-api" - CONFIG_API_HOST: http://ab-airbyte-server-svc:8001 # temporary solution for oss kube deploys for airbyte api server until that server is wrapped into the config server - CONFIG_ROOT: /configs - CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION: "0.35.15.001" - DATA_DOCKER_MOUNT: airbyte_data - DB_DOCKER_MOUNT: airbyte_db - - DATABASE_HOST: airbyte-db-svc - DATABASE_PORT: "5432" - DATABASE_DB: db-airbyte - DATABASE_URL: jdbc:postgresql://airbyte-db-svc:5432/db-airbyte - KEYCLOAK_DATABASE_URL: "\n\n\njdbc:postgresql://airbyte-db-svc:5432/db-airbyte?currentSchema=keycloak" - GOOGLE_APPLICATION_CREDENTIALS: "/secrets/gcs-log-creds/gcp.json" - INTERNAL_API_HOST: http://ab-airbyte-server-svc:8001 - WORKLOAD_API_HOST: http://ab-workload-api-server-svc:8007 - KEYCLOAK_INTERNAL_HOST: localhost # just a placeholder so that nginx template is valid - shouldn't be used when edition isn't "pro" - - CONNECTOR_BUILDER_API_HOST: ab-airbyte-connector-builder-server-svc:80 - AIRBYTE_API_HOST: http://localhost:8001/api/public - - JOB_KUBE_BUSYBOX_IMAGE: "busybox:1.35" - JOB_KUBE_CURL_IMAGE: "curlimages/curl:8.1.1" - - CONTAINER_ORCHESTRATOR_IMAGE: "airbyte/container-orchestrator:1.16.0" - WORKLOAD_INIT_IMAGE: "airbyte/workload-init-container:1.16.0" - CONNECTOR_SIDECAR_IMAGE: "airbyte/connector-sidecar:1.16.0" - - JOB_MAIN_CONTAINER_CPU_LIMIT: "" - JOB_MAIN_CONTAINER_CPU_REQUEST: "" - JOB_MAIN_CONTAINER_MEMORY_LIMIT: "" - JOB_MAIN_CONTAINER_MEMORY_REQUEST: "" - JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION: "0.29.15.001" - LOCAL_ROOT: /tmp/airbyte_local - LOG_LEVEL: INFO - MICROMETER_METRICS_ENABLED: "false" - MICROMETER_METRICS_STATSD_FLAVOR: "datadog" - RUN_DATABASE_MIGRATION_ON_STARTUP: "true" - SEGMENT_WRITE_KEY: 7UDdp5K55CyiGgsauOr2pNNujGvmhaeu - # Storage start - STORAGE_TYPE: "minio" - STORAGE_BUCKET_ACTIVITY_PAYLOAD: "airbyte-storage" - STORAGE_BUCKET_LOG: "airbyte-storage" - STORAGE_BUCKET_STATE: "airbyte-storage" - STORAGE_BUCKET_WORKLOAD_OUTPUT: "airbyte-storage" - - MINIO_ENDPOINT: "http://airbyte-minio-svc:9000" - S3_PATH_STYLE_ACCESS: "true" - # Storage end - STATSD_HOST: "localhost" - STATSD_PORT: "8125" - TEMPORAL_HOST: ab-temporal:7233 - 
TEMPORAL_WORKER_PORTS: 9001,9002,9003,9004,9005,9006,9007,9008,9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,9022,9023,9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039,9040 - TRACKING_STRATEGY: segment - WEBAPP_URL: http://ab-airbyte-webapp-svc:80 - WORKER_ENVIRONMENT: kubernetes - WORKSPACE_DOCKER_MOUNT: airbyte_workspace - WORKSPACE_ROOT: /workspace - METRIC_CLIENT: "" - OTEL_COLLECTOR_ENDPOINT: "" - ACTIVITY_MAX_ATTEMPT: "" - ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS: "" - ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS: "" - WORKFLOW_FAILURE_RESTART_DELAY_SECONDS: "" - FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT: 5G - FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST: 5G - - LAUNCHER_MICRONAUT_ENVIRONMENTS: "control-plane,oss" - WORKERS_MICRONAUT_ENVIRONMENTS: "control-plane" - CRON_MICRONAUT_ENVIRONMENTS: "control-plane" - SERVER_MICRONAUT_ENVIRONMENTS: "control-plane" - SHOULD_RUN_NOTIFY_WORKFLOWS: "true" - MAX_NOTIFY_WORKERS: "5" - KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS: "" - WORKLOAD_LAUNCHER_PARALLELISM: "10" - CONNECTOR_BUILDER_SERVER_API_HOST: http://ab-airbyte-connector-builder-server-svc:80 - PUB_SUB_ENABLED: "false" - PUB_SUB_TOPIC_NAME: "" - ENTERPRISE_SOURCE_STUBS_URL: https://connectors.airbyte.com/files/resources/connector_stubs/v0/connector_stubs.json - ---- -# Source: airbyte/templates/airbyte-workload-api-server/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-temporal/secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-pod-sweeper/configmap.yaml ---- - -apiVersion: v1 -kind: ConfigMap -metadata: - name: airbyte-sweep-pod-script - namespace: - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - -data: - sweep-pod.sh: | - #!/bin/bash - get_job_pods () { - kubectl -n ${KUBE_NAMESPACE} -L airbyte -l airbyte=job-pod \ - get pods \ - -o=jsonpath='{range .items[*]} {.metadata.name} {.status.phase} {.status.conditions[0].lastTransitionTime} {.status.startTime}{"\n"}{end}' - } - delete_pod() { - printf "From status '%s' since '%s', " $2 $3 - echo "$1" | grep -v "STATUS" | awk '{print $1}' | xargs --no-run-if-empty kubectl -n ${KUBE_NAMESPACE} delete pod - } - while : - do - echo "Starting pod sweeper cycle:" - - if [ -n "${RUNNING_TTL_MINUTES}" ]; then - # Time window for running pods - RUNNING_DATE_STR=`date -d "now - ${RUNNING_TTL_MINUTES} minutes" --utc -Ins` - RUNNING_DATE=`date -d $RUNNING_DATE_STR +%s` - echo "Will sweep running pods from before ${RUNNING_DATE_STR}" - fi - - if [ -n "${SUCCEEDED_TTL_MINUTES}" ]; then - # Shorter time window for succeeded pods - SUCCESS_DATE_STR=`date -d "now - ${SUCCEEDED_TTL_MINUTES} minutes" --utc -Ins` - SUCCESS_DATE=`date -d $SUCCESS_DATE_STR +%s` - echo "Will sweep succeeded pods from before ${SUCCESS_DATE_STR}" - fi - - if [ -n "${UNSUCCESSFUL_TTL_MINUTES}" ]; then - # Longer time window for unsuccessful pods (to debug) - NON_SUCCESS_DATE_STR=`date -d "now - ${UNSUCCESSFUL_TTL_MINUTES} minutes" --utc -Ins` - NON_SUCCESS_DATE=`date -d $NON_SUCCESS_DATE_STR +%s` - echo "Will sweep unsuccessful pods from before ${NON_SUCCESS_DATE_STR}" - fi - ( - IFS=$'\n' - for POD in `get_job_pods`; do - IFS=' ' - POD_NAME=`echo $POD | cut -d " " -f 1` - POD_STATUS=`echo $POD | cut -d " " -f 2` - POD_DATE_STR=`echo $POD | cut -d " " -f 3` - POD_START_DATE_STR=`echo $POD | cut -d 
" " -f 4` - POD_DATE=`date -d ${POD_DATE_STR:-$POD_START_DATE_STR} '+%s'` - if [ -n "${RUNNING_TTL_MINUTES}" ] && [ "$POD_STATUS" = "Running" ]; then - if [ "$POD_DATE" -lt "$RUNNING_DATE" ]; then - delete_pod "$POD_NAME" "$POD_STATUS" "$POD_DATE_STR" - fi - elif [ -n "${SUCCEEDED_TTL_MINUTES}" ] && [ "$POD_STATUS" = "Succeeded" ]; then - if [ "$POD_DATE" -lt "$SUCCESS_DATE" ]; then - delete_pod "$POD_NAME" "$POD_STATUS" "$POD_DATE_STR" - fi - elif [ -n "${UNSUCCESSFUL_TTL_MINUTES}" ] && [ "$POD_STATUS" != "Running" ] && [ "$POD_STATUS" != "Succeeded" ]; then - if [ "$POD_DATE" -lt "$NON_SUCCESS_DATE" ]; then - delete_pod "$POD_NAME" "$POD_STATUS" "$POD_DATE_STR" - fi - fi - done - ) - echo "Completed pod sweeper cycle. Sleeping for 60 seconds..." - sleep 60 - done - ---- -# Source: airbyte/templates/airbyte-temporal/configmap.yaml ---- -apiVersion: v1 -kind: ConfigMap -metadata: - name: airbyte-temporal-dynamicconfig - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -data: - "development.yaml": | - # when modifying, remember to update the docker-compose version of this file in temporal/dynamicconfig/development.yaml - frontend.enableClientVersionCheck: - - value: true - constraints: {} - history.persistenceMaxQPS: - - value: 3000 - constraints: {} - frontend.persistenceMaxQPS: - - value: 3000 - constraints: {} - frontend.historyMgrNumConns: - - value: 30 - constraints: {} - frontend.throttledLogRPS: - - value: 20 - constraints: {} - frontend.enableUpdateWorkflowExecution: - - value: true - frontend.enableUpdateWorkflowExecutionAsyncAccepted: - - value: true - history.historyMgrNumConns: - - value: 50 - constraints: {} - system.advancedVisibilityWritingMode: - - value: "off" - constraints: {} - history.defaultActivityRetryPolicy: - - value: - InitialIntervalInSeconds: 1 - MaximumIntervalCoefficient: 100.0 - BackoffCoefficient: 2.0 - MaximumAttempts: 0 - history.defaultWorkflowRetryPolicy: - - value: - InitialIntervalInSeconds: 1 - MaximumIntervalCoefficient: 100.0 - BackoffCoefficient: 2.0 - MaximumAttempts: 0 - # Limit for responses. This mostly impacts discovery jobs since they have the largest responses. - limit.blobSize.error: - - value: 15728640 # 15MB - constraints: {} - limit.blobSize.warn: - - value: 10485760 # 10MB - constraints: {} - ---- -# Source: airbyte/templates/airbyte-workload-launcher/jobs-secrets.yaml -# Create dataplane secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-temporal-ui/service.yaml ---- -apiVersion: v1 -kind: Service -metadata: - name: ab-airbyte-temporal-ui-svc - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: - ports: - - port: - targetPort: http - protocol: TCP - name: http - selector: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - ---- -# Source: airbyte/templates/airbyte-keycloak/statefulset.yaml -# if Cloud or Pro/Enterprise, render the keycloak StatefulSet template. 
- ---- -# Source: airbyte/templates/airbyte-bootloader/pod.yaml ---- -apiVersion: v1 -kind: Pod -metadata: - name: airbyte-bootloader - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - annotations: - helm.sh/hook: pre-install,pre-upgrade - helm.sh/hook-weight: "0" -spec: - serviceAccountName: airbyte-admin - restartPolicy: Never - containers: - - name: airbyte-bootloader-container - image: "airbyte/bootloader:1.16.0" - imagePullPolicy: "" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: RUN_DATABASE_MIGRATION_ON_STARTUP - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: RUN_DATABASE_MIGRATION_ON_STARTUP - - # Airbyte auth secret keys and values - - - - name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_DB - - - name: DATABASE_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - - - name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_URL - # Values from secret - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - securityContext: - fsGroup: 1000 - ---- -# Source: airbyte/templates/airbyte-bootloader/bootloader-secrets.yaml -# Create secrets only for the local deployment - ---- -# Source: airbyte/templates/airbyte-webapp/service.yaml ---- -apiVersion: v1 -kind: Service -metadata: - name: ab-airbyte-webapp-svc - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: ClusterIP - ports: - - port: 80 - targetPort: http - protocol: TCP - name: http - - selector: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - ---- -# Source: airbyte/templates/airbyte-server/deployment.yaml ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: airbyte-server - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - minReadySeconds: 30 - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - strategy: - type: # Needed due to volume claims - template: - metadata: - labels: - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - spec: - serviceAccountName: airbyte-admin - containers: - - name: airbyte-server-container - image: "airbyte/server:local-8de41bb4" - imagePullPolicy: "IfNotPresent" - env: - - - name: LOG_LEVEL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: LOG_LEVEL - - name: AIRBYTE_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_API_HOST - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: AIRBYTE_EDITION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_EDITION - - name: AIRBYTE_URL - valueFrom: - 
configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_URL - - name: CONFIG_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONFIG_ROOT - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: MICRONAUT_ENVIRONMENTS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SERVER_MICRONAUT_ENVIRONMENTS - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TRACKING_STRATEGY - - name: WORKER_ENVIRONMENT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKER_ENVIRONMENT - - name: WORKSPACE_ROOT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WORKSPACE_ROOT - - name: WEBAPP_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: WEBAPP_URL - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: TEMPORAL_HOST - - name: JOB_MAIN_CONTAINER_CPU_REQUEST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_REQUEST - - name: JOB_MAIN_CONTAINER_CPU_LIMIT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_LIMIT - - name: JOB_MAIN_CONTAINER_MEMORY_REQUEST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_REQUEST - - name: JOB_MAIN_CONTAINER_MEMORY_LIMIT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_LIMIT - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: KEYCLOAK_INTERNAL_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: KEYCLOAK_INTERNAL_HOST - - name: CONNECTOR_BUILDER_SERVER_API_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: CONNECTOR_BUILDER_SERVER_API_HOST - - name: AIRBYTE_API_AUTH_HEADER_NAME - value: "X-Airbyte-Auth" - - name: AIRBYTE_API_AUTH_HEADER_VALUE - value: "Internal server" - - name: ENTERPRISE_SOURCE_STUBS_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: ENTERPRISE_SOURCE_STUBS_URL - - # Secrets Manager - - name: SECRET_PERSISTENCE - value: - # Values for AwsSecretsManager - - # Values for Azure Key Vault - - # Values for googleSecretManager secrets - - # Values for vault secrets - - # Storage - - - name: S3_PATH_STYLE_ACCESS - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: S3_PATH_STYLE_ACCESS - - name: STORAGE_TYPE - value: MINIO - - name: STORAGE_BUCKET_ACTIVITY_PAYLOAD - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_ACTIVITY_PAYLOAD - - name: STORAGE_BUCKET_LOG - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_LOG - - name: STORAGE_BUCKET_STATE - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_STATE - - name: STORAGE_BUCKET_WORKLOAD_OUTPUT - valueFrom: - 
configMapKeyRef: - name: ab-airbyte-env - key: STORAGE_BUCKET_WORKLOAD_OUTPUT - - - - - - - name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: "MINIO_ACCESS_KEY_ID" - - name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: "MINIO_SECRET_ACCESS_KEY" - - name: MINIO_ENDPOINT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MINIO_ENDPOINT - - - - - - - - name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_DB - - - name: DATABASE_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - - - name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_URL - - - - - # Values from secret - - # Values from env - - name: CORS_ALLOWED_ORIGINS_REGEX - value: "^https:\\/\\/localhost:\\d+$" - - name: FEATURE_FLAG_CLIENT - value: "configfile" - - name: FEATURE_FLAG_PATH - value: "/etc/launchdarkly/flags.yml" - - name: PATH_TO_CONNECTORS - value: "/connectors" - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - livenessProbe: - httpGet: - path: /api/v1/health - port: http - initialDelaySeconds: 30 - periodSeconds: 10 - timeoutSeconds: 10 - successThreshold: 1 - failureThreshold: 3 - readinessProbe: - httpGet: - path: /api/v1/health - port: http - initialDelaySeconds: 5 - periodSeconds: 3 - timeoutSeconds: 10 - successThreshold: 1 - failureThreshold: 3 - ports: - - name: http - containerPort: 8001 - protocol: TCP - resources: - limits: {} - requests: {} - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 1000 - runAsNonRoot: true - runAsUser: 1000 - seccompProfile: - type: RuntimeDefault - volumeMounts: - - - mountPath: /etc/launchdarkly/flags.yml - name: flags-yaml - - mountPath: /connectors - name: connectors - securityContext: - fsGroup: 1000 - volumes: - - - hostPath: - path: /Users/angel/Developer/github/airbytehq/airbyte-platform-internal/oss/flags.yml - type: FileOrCreate - name: flags-yaml - - hostPath: - path: /update-me - type: DirectoryOrCreate - name: connectors - ---- -# Source: airbyte/templates/minio.yaml - -apiVersion: apps/v1 # for k8s versions before 1.9.0 use apps/v1beta2 and before 1.8.0 use extensions/v1beta1 -kind: StatefulSet -metadata: - # This name uniquely identifies the Deployment - name: airbyte-minio - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-1" - labels: - - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: ab-minio - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - selector: - matchLabels: - - app.kubernetes.io/name: ab-minio - app.kubernetes.io/instance: ab - serviceName: airbyte-minio-svc - volumeClaimTemplates: - - metadata: - name: airbyte-minio-pv-claim - spec: - accessModes: [ "ReadWriteOnce" ] - resources: - requests: - storage: 500Mi - template: - metadata: - labels: - # Label is used as selector in the service. 
- - app.kubernetes.io/name: ab-minio - app.kubernetes.io/instance: ab - spec: - # Refer to the PVC created earlier - securityContext: - fsGroup: 1000 - containers: - - name: airbyte-minio - image: "minio/minio:RELEASE.2023-11-20T22-40-07Z" - args: - - server - - /storage - env: - # Minio access key and secret key. This must match the S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY declared in /dev/.env. - - name: MINIO_ROOT_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: MINIO_ACCESS_KEY_ID - - name: MINIO_ROOT_PASSWORD - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: MINIO_SECRET_ACCESS_KEY - ports: - - containerPort: 9000 - resources: - requests: - memory: "1024Mi" - cpu: "200m" - limits: - memory: "1024Mi" - cpu: "200m" - # Mount the volume into the pod - securityContext: - allowPrivilegeEscalation: false - runAsNonRoot: true - # uid=1000(airbyte) - runAsUser: 1000 - # gid=1000(airbyte) - runAsGroup: 1000 - readOnlyRootFilesystem: false - capabilities: - drop: ["ALL"] - seccompProfile: - type: RuntimeDefault - - volumeMounts: - - name: airbyte-minio-pv-claim # must match the volume name, above - mountPath: "/storage" - ---- -apiVersion: v1 -kind: Service -metadata: - name: airbyte-minio-svc - labels: - - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: ab-minio - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-1" -spec: - ports: - - port: 9000 - targetPort: 9000 - protocol: TCP - selector: - - app.kubernetes.io/name: ab-minio - app.kubernetes.io/instance: ab ---- -# This pod creates the state-storage bucket in the minio server, -# which the local cloud deployment requires to store its state. -apiVersion: v1 -kind: Pod -metadata: - name: airbyte-minio-create-bucket - annotations: - "helm.sh/hook": post-install, post-upgrade - "helm.sh/hook-delete-policy": hook-succeeded, before-hook-creation -spec: - restartPolicy: OnFailure - securityContext: - fsGroup: 1000 - containers: - - name: minio-mc - image: "airbyte/mc:latest" - command: ["/bin/sh", "-c", - "until (/usr/bin/mc config host add myminio $MINIO_ENDPOINT $MINIO_ACCESS_KEY $MINIO_SECRET_KEY) do echo '...waiting...' 
&& sleep 1; done; - /usr/bin/mc mb --ignore-existing myminio/state-storage; - /usr/bin/mc policy set public myminio/state-storage; - /usr/bin/mc mb --ignore-existing myminio/airbyte-dev-logs; - /usr/bin/mc policy set public myminio/airbyte-dev-logs;"] - securityContext: - allowPrivilegeEscalation: false - runAsNonRoot: true - # uid=1000(airbyte) - runAsUser: 1000 - # gid=1000(airbyte) - runAsGroup: 1000 - readOnlyRootFilesystem: false - capabilities: - drop: ["ALL"] - seccompProfile: - type: RuntimeDefault - env: - # this is for the internally deployed minio - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: MINIO_ACCESS_KEY_ID - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: MINIO_SECRET_ACCESS_KEY - - name: MINIO_ENDPOINT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: MINIO_ENDPOINT ---- -# Source: airbyte/templates/airbyte-bootloader/pod.yaml -apiVersion: v1 -kind: Pod -metadata: - name: airbyte-bootloader - labels: - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: airbyte - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm - annotations: - helm.sh/hook: pre-install,pre-upgrade - helm.sh/hook-weight: "0" -spec: - serviceAccountName: airbyte-admin - restartPolicy: Never - containers: - - name: airbyte-bootloader-container - image: "airbyte/bootloader:1.16.0" - imagePullPolicy: "" - env: - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: AIRBYTE_VERSION - - name: RUN_DATABASE_MIGRATION_ON_STARTUP - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: RUN_DATABASE_MIGRATION_ON_STARTUP - - # Airbyte auth secret keys and values - - - - name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_HOST - - - name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_PORT - - - name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_DB - - - name: DATABASE_USER - valueFrom: - secretKeyRef: - name: ab-airbyte-secrets - key: DATABASE_USER - - - name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: - ab-airbyte-secrets - key: DATABASE_PASSWORD - - - name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: ab-airbyte-env - key: DATABASE_URL - # Values from secret - - # Values from env - - # Values from extraEnv for more compability(if you want to use external secret source or other stuff) - securityContext: - fsGroup: 1000 ---- -# Source: airbyte/templates/airbyte-db.yaml -apiVersion: v1 -kind: Service -metadata: - name: airbyte-db-svc - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-1" - labels: - - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - type: ClusterIP - ports: - - port: 5432 - protocol: TCP - selector: - - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab ---- -# Source: airbyte/templates/airbyte-db.yaml -apiVersion: apps/v1 -kind: StatefulSet -metadata: - name: airbyte-db - annotations: - helm.sh/hook: pre-install - helm.sh/hook-weight: "-1" - labels: - - helm.sh/chart: airbyte-0.1.0 - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab - app.kubernetes.io/version: "1.16.0" - app.kubernetes.io/managed-by: Helm -spec: - replicas: 1 - serviceName: airbyte-db-svc - selector: - matchLabels: - - app.kubernetes.io/name: 
ab-db - app.kubernetes.io/instance: ab - template: - metadata: - labels: - - app.kubernetes.io/name: ab-db - app.kubernetes.io/instance: ab - spec: - containers: - - name: airbyte-db-container - image: "airbyte/db:1.16.0" - env: - - name: POSTGRES_DB - value: db-airbyte - - name: POSTGRES_PASSWORD - value: airbyte - - name: POSTGRES_USER - value: airbyte - - name: PGDATA - value: /var/lib/postgresql/data/pgdata - ports: - - containerPort: 5432 - securityContext: - allowPrivilegeEscalation: false - capabilities: - drop: - - ALL - readOnlyRootFilesystem: false - runAsGroup: 70 - runAsNonRoot: true - runAsUser: 70 - seccompProfile: - type: RuntimeDefault - volumeMounts: - - name: airbyte-volume-db - mountPath: /var/lib/postgresql/data - securityContext: - fsGroup: 70 - - volumeClaimTemplates: - - metadata: - name: airbyte-volume-db - spec: - accessModes: [ "ReadWriteOnce" ] - resources: - requests: - storage: 500Mi diff --git a/charts/v2/airbyte/templates/_database.tpl b/charts/v2/airbyte/templates/_database.tpl deleted file mode 100644 index fd0e5ef8736..00000000000 --- a/charts/v2/airbyte/templates/_database.tpl +++ /dev/null @@ -1,234 +0,0 @@ -{{/* -Database Configuration -*/}} - -{{/* -Renders the database host -*/}} -{{- define "airbyte.database.host" }} - {{- if .Values.postgresql.enabled }} - {{- printf "%s" "airbyte-db-svc" }} - {{- else if .Values.global.database.host }} - {{- .Values.global.database.host }} - {{- else }} - {{ $host := .Values.global.database.host | required "You must set `global.database.host` when using an external database" }} - {{- end }} -{{- end }} - -{{/* -Renders an environment variable definition that provides the database host -*/}} -{{- define "airbyte.database.host.env" }} -- name: DATABASE_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: DATABASE_HOST -{{- end }} - -{{/* -Renders the database port -*/}} -{{- define "airbyte.database.port" }} - {{- if .Values.postgresql.enabled }} - {{- printf "%s" "5432" }} - {{- else if .Values.global.database.port }} - {{- .Values.global.database.port }} - {{- else }} - {{ $port := .Values.global.database.port | required "You must set `global.database.port` when using an external database" }} - {{- end }} -{{- end }} - -{{/* -Renders an environment variable definition that provides the database port -*/}} -{{- define "airbyte.database.port.env" }} -- name: DATABASE_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: DATABASE_PORT -{{- end }} - -{{/* -Renders the database name -*/}} -{{- define "airbyte.database.name" }} - {{- if .Values.postgresql.enabled }} - {{- .Values.postgresql.postgresqlDatabase }} - {{- else if .Values.global.database.database }} - {{- .Values.global.database.database }} - {{- else }} - {{ $database := .Values.global.database.database | required "You must set `global.database.database` when using an external database" }} - {{- end }} -{{- end }} - -{{/* -Renders an environment variable definition that provides the database name -*/}} -{{- define "airbyte.database.name.env" }} -- name: DATABASE_DB - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: DATABASE_DB -{{- end }} - -{{/* -Renders the database user -*/}} -{{- define "airbyte.database.user" }} - {{- if .Values.postgresql.enabled }} - {{- .Values.postgresql.postgresqlUsername }} - {{- else if .Values.global.database.user }} - {{- .Values.global.database.user }} - {{- else }} - {{- if .Values.global.database.userSecretKey }} - {{ $secretName := 
.Values.global.database.secretName | required "You must set `global.database.secretName` when using an external database" }} - {{- else }} - {{ $user := .Values.global.database.user | required "You must set `global.database.user` when using an external database" }} - {{- end }} - {{- end }} -{{- end }} - -{{/* -Renders the name of the secret where the database user will be referenced -*/}} -{{- define "airbyte.database.userSecretKey" }} - {{- if .Values.global.database.userSecretKey }} - {{ $secretName := .Values.global.database.secretName | required "You must set `global.database.secretName` when using an external database" }} - {{- .Values.global.database.userSecretKey }} - {{- else }} - {{- printf "%s" "DATABASE_USER" }} - {{- end }} -{{- end }} - -{{/* -Renders an environment variable definition that provides the database user -*/}} -{{- define "airbyte.database.user.env" }} -- name: DATABASE_USER - valueFrom: - secretKeyRef: - {{- if .Values.global.database.userSecretKey }} - name: {{ .Values.global.database.secretName }} - {{- else }} - name: {{ .Release.Name }}-airbyte-secrets - {{- end }} - key: {{ include "airbyte.database.userSecretKey" .}} -{{- end }} - -{{/* -Renders the database password -*/}} -{{- define "airbyte.database.password" }} - {{- if .Values.postgresql.enabled }} - {{- .Values.postgresql.postgresqlPassword }} - {{- else if .Values.global.database.password }} - {{- .Values.global.database.password }} - {{- else }} - {{- if .Values.global.database.passwordSecretKey }} - {{ $secretName := .Values.global.database.secretName | required "You must set `global.database.secretName` when using an external database" }} - {{- else }} - {{ $password := .Values.global.database.password | required "You must set `global.database.password` when using an external database" }} - {{- end }} - {{- end }} -{{- end }} - -{{/* -Renders the name of the secret where the database password will be referenced -*/}} -{{- define "airbyte.database.passwordSecretKey" }} - {{- if .Values.global.database.passwordSecretKey }} - {{ $secretName := .Values.global.database.secretName | required "You must set `global.database.secretName` when using an external database" }} - {{- .Values.global.database.passwordSecretKey }} - {{- else }} - {{- printf "%s" "DATABASE_PASSWORD" }} - {{- end }} -{{- end }} - -{{/* -Renders an environment variable definition that provides the database password -*/}} -{{- define "airbyte.database.password.env" }} -- name: DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: {{ include "airbyte.database.secretName" . }} - key: {{ include "airbyte.database.passwordSecretKey" . }} -{{- end }} - - -{{/* -Renders the database url (JDBC) -*/}} -{{- define "airbyte.database.url" }} -{{- $host := (include "airbyte.database.host" .) }} -{{- $dbName := (include "airbyte.database.name" .) }} -{{- $port := (include "airbyte.database.port" . 
) }} -{{- printf "jdbc:postgresql://%s:%s/%s" $host $port $dbName }} -{{- end }} - -{{/* -Renders an environment variable definition that provides the database url (JDBC) -*/}} -{{- define "airbyte.database.url.env" }} -- name: DATABASE_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: DATABASE_URL -{{- end }} - -{{/* -Renders the name of the secret containing database credentials -*/}} -{{- define "airbyte.database.secretName" }} - {{- if .Values.global.database.secretName }} - {{- .Values.global.database.secretName | quote }} - {{- else }} - {{ .Release.Name }}-airbyte-secrets - {{- end }} -{{- end }} - -{{/* -Renders all of the common environment variables which provide database credentials -*/}} -{{- define "airbyte.database.envs" }} -{{ include "airbyte.database.host.env" . }} -{{ include "airbyte.database.port.env" . }} -{{ include "airbyte.database.name.env" . }} -{{ include "airbyte.database.user.env" . }} -{{ include "airbyte.database.password.env" . }} -{{ include "airbyte.database.url.env" . }} -{{- end }} - -{{/* -Renders a set of database secrets to be included in the shared Airbyte secret -*/}} -{{- define "airbyte.database.secrets" }} -{{ $user := (include "airbyte.database.user" .)}} -{{- if not (empty $user) }} -DATABASE_USER: {{ $user }} -{{- end }} -{{ $password := (include "airbyte.database.password" .)}} -{{- if not (empty $password) }} -DATABASE_PASSWORD: {{ $password }} -{{- end}} -{{- end }} - -{{/* -Renders a set of database configuration variables to be included in the shared Airbyte config map -*/}} -{{- define "airbyte.database.configVars" }} -DATABASE_HOST: {{ include "airbyte.database.host" . }} -DATABASE_PORT: {{ include "airbyte.database.port" . | quote }} -DATABASE_DB: {{ include "airbyte.database.name" . }} -DATABASE_URL: {{ include "airbyte.database.url" . }} -{{- if .Values.global.database.user }} -DATABASE_USER: {{ include "airbyte.database.user" . }} -{{- end}} -{{- if .Values.global.database.password }} -DATABASE_PASSWORD: {{ include "airbyte.database.password" . 
}} -{{- end}} -{{- end }} diff --git a/charts/v2/airbyte/templates/_enterprise.tpl b/charts/v2/airbyte/templates/_enterprise.tpl deleted file mode 100644 index 8e74d5aac7d..00000000000 --- a/charts/v2/airbyte/templates/_enterprise.tpl +++ /dev/null @@ -1,83 +0,0 @@ -{{/* -Enteprise Configuration -*/}} - -{{- define "airbyte.enterprise.license" -}} -{{- if and (or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise")) (not .Values.global.airbyteYml) }} -{{- $secretName := .Values.global.enterprise.secretName | required "You must set `global.enterprise.secretName` when `global.edition` is 'enterprise'" }} -{{- $secretKey := .Values.global.enterprise.licenseKeySecretKey | required "You must set `global.enterprise.licenseKeySecretKey` when `global.edition` is 'enterprise'" }} -- name: AIRBYTE_LICENSE_KEY - valueFrom: - secretKeyRef: - name: {{ .Values.global.enterprise.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: {{ .Values.global.enterprise.licenseKeySecretKey }} -{{- end }} -{{- end }} - -{{- define "airbyte.enterprise.instanceAdmin" -}} -{{- if and (or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise")) (not .Values.global.airbyteYml) }} -{{- $auth := .Values.global.auth | required "You must set `global.auth` when `global.edition` is 'enterprise'"}} -{{- $authInstanceAdminSecretName := .Values.global.auth.instanceAdmin.secretName | required "You must set `global.auth.instanceAdmin.secretName` when `global.edition` is 'enterprise'" }} -{{- $authInstanceAdminFirstName := .Values.global.auth.instanceAdmin.firstName | required "You must set `global.auth.instanceAdmin.firstName` when `global.edition` is 'enterprise'" }} -{{- $authInstanceAdminLastName := .Values.global.auth.instanceAdmin.lastName | required "You must set `global.auth.instanceAdmin.lastName` when `global.edition` is 'enterprise'" }} -{{- $authInstanceAdminEmailSecretKey := .Values.global.auth.instanceAdmin.emailSecretKey | required "You must set `global.auth.instanceAdmin.emailSecretKey` when `global.edition` is 'enterprise'" }} -{{- $authInstanceAdminPasswordSecretKey := .Values.global.auth.instanceAdmin.passwordSecretKey | required "You must set `global.auth.instanceAdmin.passwordSecretKey` when `global.edition` is 'enterprise'" }} -- name: INITIAL_USER_FIRST_NAME - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: INITIAL_USER_FIRST_NAME -- name: INITIAL_USER_LAST_NAME - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: INITIAL_USER_LAST_NAME -- name: INITIAL_USER_EMAIL - valueFrom: - secretKeyRef: - name: {{ .Values.global.auth.instanceAdmin.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: {{ .Values.global.auth.instanceAdmin.emailSecretKey }} -- name: INITIAL_USER_PASSWORD - valueFrom: - secretKeyRef: - name: {{ .Values.global.auth.instanceAdmin.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: {{ .Values.global.auth.instanceAdmin.passwordSecretKey }} -{{- end }} -{{- end }} - -{{- define "airbyte.enterprise.identityProvider" -}} -{{- if and (or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise")) (not .Values.global.airbyteYml) .Values.global.auth.identityProvider }} -{{- $authIdentityProviderSecretName := .Values.global.auth.identityProvider.secretName | required "You must set `global.auth.identityProvider.secretName` when enabling SSO" }} -{{- $authIdentityProviderType := .Values.global.auth.identityProvider.type | 
required "You must set `global.auth.identityProvider.type` when enabling SSO "}} -{{- $authIdentityProviderOIDC := .Values.global.auth.identityProvider.oidc | required "You must set `global.auth.identityProvider.oidc` when enabling SSO" }} -{{- $authIdentityProviderOIDCDomain := .Values.global.auth.identityProvider.oidc.domain | required "You must set `global.auth.identityProvider.oidc.domain` when enabling SSO" }} -{{- $authIdentityProviderOIDCAppName := .Values.global.auth.identityProvider.oidc.appName | required "You must set `global.auth.identityProvider.oidc.appName` when enabling SSO" }} -{{- $authIdentityProviderOIDCClientIdSecretKey := .Values.global.auth.identityProvider.oidc.clientIdSecretKey | required "You must set `global.auth.identityProvider.oidc.clientIdSecretKey` when enabling SSO" }} -{{- $authIdentityProviderOIDCClientSecretSecretKey := .Values.global.auth.identityProvider.oidc.clientSecretSecretKey | required "You must set `global.auth.identityProvider.oidc.clientSecretSecretKey` when enabling SSO" }} -- name: IDENTITY_PROVIDER_TYPE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: IDENTITY_PROVIDER_TYPE -- name: OIDC_DOMAIN - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: OIDC_DOMAIN -- name: OIDC_APP_NAME - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: OIDC_APP_NAME -- name: OIDC_CLIENT_ID - valueFrom: - secretKeyRef: - name: {{ .Values.global.auth.identityProvider.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: {{ .Values.global.auth.identityProvider.oidc.clientIdSecretKey }} -- name: OIDC_CLIENT_SECRET - valueFrom: - secretKeyRef: - name: {{ .Values.global.auth.identityProvider.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: {{ .Values.global.auth.identityProvider.oidc.clientSecretSecretKey }} -{{- end }} -{{- end }} diff --git a/charts/v2/airbyte/templates/_helpers.tpl b/charts/v2/airbyte/templates/_helpers.tpl index c85daa44335..451c9166544 100644 --- a/charts/v2/airbyte/templates/_helpers.tpl +++ b/charts/v2/airbyte/templates/_helpers.tpl @@ -5,6 +5,17 @@ Expand the name of the chart. {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} {{- end }} +{{/* +Returns the name of a given component +*/}} +{{- define "airbyte.componentName" -}} +{{ $tplPathParts := split "/" $.Template.Name }} +{{ $indexLast := printf "_%d" (sub (len $tplPathParts) 2) }} +{{ $componentName := trimPrefix "airbyte-" (index $tplPathParts $indexLast) }} +{{- printf "%s" $componentName }} +{{- end }} + + {{/* Create a default fully qualified app name. We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 
@@ -47,11 +58,10 @@ Selector labels */}} {{- define "airbyte.selectorLabels" -}} {{ $tplPathParts := split "/" $.Template.Name }} -{{- if gt (len $tplPathParts) 3 }} -app.kubernetes.io/name: {{ trimPrefix "airbyte-" $tplPathParts._2 }} -{{- else }} -app.kubernetes.io/name: {{ .Chart.Name }} -{{- end }} +{{ $indexLast := printf "_%d" (sub (len $tplPathParts) 2) }} +{{ $componentName := trimPrefix "airbyte-" (index $tplPathParts $indexLast) }} +airbyte: {{ $componentName }} +app.kubernetes.io/name: {{ $componentName }} app.kubernetes.io/instance: {{ .Release.Name }} {{- end }} @@ -107,32 +117,6 @@ Create the name of the service account to use {{- end }} {{- end }} -{{/* -Add environment variables to configure minio -*/}} -{{- define "airbyte.storage.minio.endpoint" -}} -{{- if ((((.Values.global).storage).minio).endpoint) }} - {{- .Values.global.storage.minio.endpoint -}} -{{- else -}} - {{- printf "http://airbyte-minio-svc:9000" -}} -{{- end -}} -{{- end -}} - -{{- define "airbyte.s3PathStyleAccess" -}} -{{- ternary "true" "" (eq (lower (default "" .Values.global.storage.type)) "minio") -}} -{{- end -}} - -{{/* -Returns the GCP credentials path -*/}} -{{- define "airbyte.gcpLogCredentialsPath" -}} -{{- if ((((.Values.global).storage).gcs).credentialsPath) }} - {{- printf "%s" .Values.global.storage.gcs.credentialsPath -}} -{{- else -}} - {{- printf "%s" "/secrets/gcs-log-creds/gcp.json" -}} -{{- end -}} -{{- end -}} - {{/* Construct comma separated list of key/value pairs from object (useful for ENV var values) */}} @@ -155,189 +139,6 @@ Construct semi-colon delimited list of comma separated key/value pairs from arra {{ join ";" $mapList }} {{- end -}} -## DEFAULT HELM VALUES -# Secret Manager Defaults -{{/* -Define secret persistence -*/}} -{{- define "airbyte.secretPersistence" -}} -{{- if (((.Values.global).secretsManager).type) }} - {{- printf "%s" (snakecase .Values.global.secretsManager.type) }} -{{- else }} - {{- printf "" }} -{{- end }} -{{- end }} - -{{/* -Get secret store name or default -*/}} -{{- define "airbyte.secretStoreName" -}} -{{- $secretStoreName := . -}} -{{- if $secretStoreName -}} - {{- printf "%s" $secretStoreName -}} -{{- else -}} - {{- printf "airbyte-config-secrets" -}} -{{- end -}} -{{- end -}} - -{{/* -Get awsSecretManager access key id secret key or default -*/}} -{{- define "airbyte.awsSecretManagerAccessKeyIdSecretKey" -}} -{{- $awsSecretManagerAccessKeyIdSecretKey := . -}} -{{- if $awsSecretManagerAccessKeyIdSecretKey -}} - {{- printf "%s" $awsSecretManagerAccessKeyIdSecretKey -}} -{{- else -}} - {{- printf "aws-secret-manager-access-key-id" -}} -{{- end -}} -{{- end -}} - -{{/* -Get awsSecretManager secret access key secret key or default -*/}} -{{- define "airbyte.awsSecretManagerSecretAccessKeySecretKey" -}} -{{- $awsSecretManagerSecretAccessKeySecretKey := . -}} -{{- if $awsSecretManagerSecretAccessKeySecretKey -}} - {{- printf "%s" $awsSecretManagerSecretAccessKeySecretKey -}} -{{- else -}} - {{- printf "aws-secret-manager-secret-access-key" -}} -{{- end -}} -{{- end -}} - -{{/* -Get googleSecretManager credentials secret key or default -*/}} -{{- define "airbyte.googleSecretManagerCredentialsSecretKey" -}} -{{- $googleSecretManagerCredentialsSecretKey := . 
-}} -{{- if $googleSecretManagerCredentialsSecretKey -}} - {{- printf "%s" $googleSecretManagerCredentialsSecretKey -}} -{{- else -}} - {{- printf "google-secret-manager-credentials" -}} -{{- end -}} -{{- end -}} - -{{/* -Get vault auth token secret key or default -*/}} -{{- define "airbyte.vaultAuthTokenSecretKey" -}} -{{- $vaultAuthTokenSecretKey := . -}} -{{- if $vaultAuthTokenSecretKey -}} - {{- printf "%s" $vaultAuthTokenSecretKey -}} -{{- else -}} - {{- printf "vault-auth-token" -}} -{{- end -}} -{{- end -}} - - -# Storage Defaults -{{/* -Get storage type or default -*/}} -{{- define "airbyte.storageType" -}} -{{- $storageType := . -}} -{{- if $storageType -}} - {{- printf "%s" $storageType -}} -{{- else -}} - {{- printf "local" -}} -{{- end -}} -{{- end -}} - -{{/* -Get storage bucket log or default -*/}} -{{- define "airbyte.storageBucketLog" -}} -{{- $storageBucketLog := . -}} -{{- if $storageBucketLog -}} - {{- printf "%s" $storageBucketLog -}} -{{- else -}} - {{- printf "airbyte-storage" -}} -{{- end -}} -{{- end -}} - -{{/* -Get storage bucket state or default -*/}} -{{- define "airbyte.storageBucketState" -}} -{{- $storageBucketState := . -}} -{{- if $storageBucketState -}} - {{- printf "%s" $storageBucketState -}} -{{- else -}} - {{- printf "airbyte-storage" -}} -{{- end -}} -{{- end -}} - -{{/* -Get storage bucket workload output or default -*/}} -{{- define "airbyte.storageBucketWorkloadOutput" -}} -{{- $storageBucketWorkloadOutput := . -}} -{{- if $storageBucketWorkloadOutput -}} - {{- printf "%s" $storageBucketWorkloadOutput -}} -{{- else -}} - {{- printf "airbyte-storage" -}} -{{- end -}} -{{- end -}} - -{{/* -Get s3 access key id secret key or default -*/}} -{{- define "airbyte.s3AccessKeyIdSecretKey" -}} -{{- $s3AccessKeyIdSecretKey := . -}} -{{- if $s3AccessKeyIdSecretKey -}} - {{- printf "%s" $s3AccessKeyIdSecretKey -}} -{{- else -}} - {{- printf "s3-access-key-id" -}} -{{- end -}} -{{- end -}} - -{{/* -Get s3 secret access key secret key or default -*/}} -{{- define "airbyte.s3SecretAccessKeySecretKey" -}} -{{- $s3SecretAccessKeySecretKey := . -}} -{{- if $s3SecretAccessKeySecretKey -}} - {{- printf "%s" $s3SecretAccessKeySecretKey -}} -{{- else -}} - {{- printf "s3-secret-access-key" -}} -{{- end -}} -{{- end -}} - -{{/* -Get minio access key id secret key or default -*/}} -{{- define "airbyte.minioAccessKeyIdSecretKey" -}} -{{- $minioAccessKeyIdSecretKey := . -}} -{{- if $minioAccessKeyIdSecretKey -}} - {{- printf "%s" $minioAccessKeyIdSecretKey -}} -{{- else -}} - {{- printf "minio-access-key-id" -}} -{{- end -}} -{{- end -}} - -{{/* -Get minio secret access key secret key or default -*/}} -{{- define "airbyte.minioSecretAccessKeySecretKey" -}} -{{- $minioSecretAccessKeySecretKey := . -}} -{{- if $minioSecretAccessKeySecretKey -}} - {{- printf "%s" $minioSecretAccessKeySecretKey -}} -{{- else -}} - {{- printf "minio-secret-access-key" -}} -{{- end -}} -{{- end -}} - -{{/* -Get gcs credentials secret key or default -*/}} -{{- define "airbyte.gcsCredentialsSecretKey" -}} -{{- $gcsCredentialsSecretKey := . -}} -{{- if $gcsCredentialsSecretKey -}} - {{- printf "%s" $gcsCredentialsSecretKey -}} -{{- else -}} - {{- printf "gcs-credentials" -}} -{{- end -}} -{{- end -}} - {{/* Convert tags to a comma-separated list of key=value pairs. */}} @@ -353,3 +154,15 @@ Convert tags to a comma-separated list of key=value pairs. 
{{- end -}} {{- join "," $result -}} {{- end -}} + +{{/* +Hook for passing in extra config map vars +*/}} +{{- define "airbyte.extra.configVars" }} +{{- end }} + +{{/* +Hook for passing in extra secrets +*/}} +{{- define "airbyte.extra.secrets" }} +{{- end }} diff --git a/charts/v2/airbyte/templates/_images.tpl b/charts/v2/airbyte/templates/_images.tpl index 6c32ec44171..fc4fd60172f 100644 --- a/charts/v2/airbyte/templates/_images.tpl +++ b/charts/v2/airbyte/templates/_images.tpl @@ -8,10 +8,10 @@ {{/* some images are defined as a string instead of an object (busybox, curl, connector sidecar, etc) */}} {{- if (eq (typeOf $img) "string") -}} -{{- printf "%s%s" $reg (tpl $img $root) | quote -}} +{{- printf "%s%s" $reg (tpl $img $root) -}} {{- else -}} {{- $tag := coalesce $img.tag $root.Values.global.image.tag $root.Chart.AppVersion -}} -{{- printf "%s%s:%s" $reg $img.repository $tag | quote -}} +{{- printf "%s%s:%s" $reg $img.repository $tag -}} {{- end -}} -{{- end -}} \ No newline at end of file +{{- end -}} diff --git a/charts/v2/airbyte/templates/_keycloak.tpl b/charts/v2/airbyte/templates/_keycloak.tpl deleted file mode 100644 index ac0545dbec8..00000000000 --- a/charts/v2/airbyte/templates/_keycloak.tpl +++ /dev/null @@ -1,44 +0,0 @@ -{{/* -Keycloak Configuration -*/}} - -{{- define "airbyte.keycloak.database.user.env" }} -- name: KEYCLOAK_DATABASE_USERNAME - valueFrom: - secretKeyRef: - {{- if .Values.global.database.userSecretKey }} - name: {{ .Values.global.database.secretName }} - {{- else }} - name: {{ .Release.Name }}-airbyte-secrets - {{- end }} - key: {{ include "airbyte.database.userSecretKey" .}} -{{- end }} - -{{- define "airbyte.keycloak.database.password.env" }} -- name: KEYCLOAK_DATABASE_PASSWORD - valueFrom: - secretKeyRef: - name: {{ include "airbyte.database.secretName" . }} - key: {{ include "airbyte.database.passwordSecretKey" . }} -{{- end }} - -{{- define "airbyte.keycloak.database.url" }} -{{ $host := (include "airbyte.database.host" .) }} -{{ $dbName := (include "airbyte.database.name" .) }} -{{ $port := (include "airbyte.database.port" . ) }} -{{- printf "jdbc:postgresql://%s:%s/%s?currentSchema=keycloak" $host $port $dbName }} -{{- end }} - -{{- define "airbyte.keycloak.database.url.env" }} -- name: KEYCLOAK_DATABASE_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: KEYCLOAK_DATABASE_URL -{{- end }} - -{{- define "airbyte.keycloak.database.envs" }} -{{ include "airbyte.keycloak.database.user.env" . }} -{{ include "airbyte.keycloak.database.password.env" . }} -{{ include "airbyte.keycloak.database.url.env" . 
}} -{{- end }} diff --git a/charts/v2/airbyte/templates/_logging.tpl b/charts/v2/airbyte/templates/_logging.tpl deleted file mode 100644 index f1382739176..00000000000 --- a/charts/v2/airbyte/templates/_logging.tpl +++ /dev/null @@ -1,10 +0,0 @@ -{{/* -Common logging configs -*/}} -{{- define "airbyte.logging.envs" }} -- name: LOG_LEVEL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: LOG_LEVEL -{{- end }} diff --git a/charts/v2/airbyte/templates/_storage.tpl b/charts/v2/airbyte/templates/_storage.tpl deleted file mode 100644 index d9b624f645e..00000000000 --- a/charts/v2/airbyte/templates/_storage.tpl +++ /dev/null @@ -1,256 +0,0 @@ -{{/* -Storage configs -*/}} - -{{/* -Returns the provider name -*/}} -{{- define "airbyte.storage.provider" }} -{{- if .Values.global.storage.type }} - {{- printf "%s" (lower .Values.global.storage.type) }} -{{- else }} - {{- printf "minio" }} -{{- end }} -{{- end }} - -{{/* -Returns the storage provider secret name -*/}} -{{- define "airbyte.storage.secretName" }} -{{- if .Values.global.storage.secretName }} - {{- printf "%s" .Values.global.storage.secretName }} -{{- else if .Values.global.storage.storageSecretName }} - {{/* - NOTE: `storageSecretName` is the legacy name of this key, but we want to standardize on all configs - providing the name of the secret as `secretName`, under the respective section of `values.yaml`. - We continue to support this here for backwards compatibility. - */}} - {{- printf "%s" .Values.global.storage.storageSecretName }} -{{- else -}} - {{/* GCS has its own default secret we create */}} - {{- if eq (include "airbyte.storage.provider" .) "gcs" }} - {{- printf "%s-gcs-log-creds" .Release.Name }} - {{- else }} - {{- printf "%s-airbyte-secrets" .Release.Name }} - {{- end }} -{{- end }} -{{- end }} - -{{/* -Returns azure environment variables. -*/}} -{{- define "airbyte.storage.azure.envs" }} -{{- if .Values.global.storage.azure.connectionString }} -- name: AZURE_STORAGE_CONNECTION_STRING - value: {{ .Values.global.storage.azure.connectionString }} -{{- end }} -{{- if .Values.global.storage.azure.connectionStringSecretKey }} -- name: AZURE_STORAGE_CONNECTION_STRING - valueFrom: - secretKeyRef: - name: {{ include "airbyte.storage.secretName" . }} - key: {{ .Values.global.storage.azure.connectionStringSecretKey }} -{{- end }} -{{- end }} - -{{/* -Returns azure secrets -*/}} -{{- define "airbyte.storage.azure.secrets" }} -{{- if .Values.global.storage.azure }} -AZURE_STORAGE_CONNECTION_STRING: {{ .Values.global.storage.azure.connectionString | default "" }} -{{- end }} -{{- end }} - -{{/* -Returns S3 environment variables. -*/}} -{{- define "airbyte.storage.s3.envs" }} -{{- if eq .Values.global.storage.s3.authenticationType "credentials" }} -- name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.storage.secretName" . }} - key: {{ .Values.global.storage.s3.accessKeyIdSecretKey | default "s3-access-key-id" }} -- name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: {{ include "airbyte.storage.secretName" . 
}} - key: {{ .Values.global.storage.s3.secretAccessKeySecretKey | default "s3-secret-access-key" }} -{{- end }} -{{- if .Values.global.storage.s3.region }} -- name: AWS_DEFAULT_REGION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AWS_DEFAULT_REGION -{{- end }} -{{- end}} - -{{/* -Returns S3 secrets -*/}} -{{- define "airbyte.storage.s3.secrets" }} -{{- if and (.Values.global.storage.s3) (eq .Values.global.storage.s3.authenticationType "credentials") }} -AWS_ACCESS_KEY_ID: {{ .Values.global.storage.s3.accessKeyId | default "" }} -AWS_SECRET_ACCESS_KEY: {{ .Values.global.storage.s3.secretAccessKey | default "" }} -{{- end }} -{{- end }} - -{{/* -Returns GCS environment variables. -*/}} -{{- define "airbyte.storage.gcs.envs" }} -- name: GOOGLE_APPLICATION_CREDENTIALS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: GOOGLE_APPLICATION_CREDENTIALS -{{- end}} - -{{/* -Returns GCS secrets. -*/}} -{{- define "airbyte.storage.gcs.secrets" }} -{{- if .Values.global.storage.gcs }} -gcp.json: {{ .Values.global.storage.gcs.credentialsJson }} -{{- end }} -{{- end}} - -{{/* -Returns Minio environment variables. -*/}} -{{- define "airbyte.storage.minio.envs" }} -{{- if .Values.global.storage.minio }} -- name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.storage.secretName" . }} - key: {{ .Values.global.storage.minio.accessKeyIdSecretKey | default "MINIO_ACCESS_KEY_ID" }} -- name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: {{ include "airbyte.storage.secretName" . }} - key: {{ .Values.global.storage.minio.secretAccessKeySecretKey | default "MINIO_SECRET_ACCESS_KEY" }} -{{- else }} -- name: AWS_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.storage.secretName" . }} - key: "MINIO_ACCESS_KEY_ID" -- name: AWS_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: {{ include "airbyte.storage.secretName" . }} - key: "MINIO_SECRET_ACCESS_KEY" -{{- end }} -- name: MINIO_ENDPOINT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MINIO_ENDPOINT -{{- end }} - -{{- define "airbyte.storage.minio.secrets" }} -{{- if .Values.global.storage.minio }} -MINIO_ACCESS_KEY_ID: {{ .Values.global.storage.minio.accessKeyId | default "minio" | quote }} -MINIO_SECRET_ACCESS_KEY: {{ .Values.global.storage.minio.secretAccessKey | default "minio123" | quote }} -{{- else }} -MINIO_ACCESS_KEY_ID: "minio" -MINIO_SECRET_ACCESS_KEY: "minio123" -{{- end }} -{{- end }} - -{{/* -Returns storage config environment variables. -*/}} -{{- define "airbyte.storage.envs" }} -{{- $storageProvider := (include "airbyte.storage.provider" .) }} -- name: S3_PATH_STYLE_ACCESS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: S3_PATH_STYLE_ACCESS -- name: STORAGE_TYPE - value: {{ upper $storageProvider }} -- name: STORAGE_BUCKET_ACTIVITY_PAYLOAD - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STORAGE_BUCKET_ACTIVITY_PAYLOAD -- name: STORAGE_BUCKET_LOG - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STORAGE_BUCKET_LOG -- name: STORAGE_BUCKET_STATE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STORAGE_BUCKET_STATE -- name: STORAGE_BUCKET_WORKLOAD_OUTPUT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STORAGE_BUCKET_WORKLOAD_OUTPUT -{{/* S3 */}} -{{- if eq $storageProvider "s3" }} -{{- include "airbyte.storage.s3.envs" . 
}} -{{- end }} - -{{/* GCS */}} -{{- if eq $storageProvider "gcs" }} -{{- include "airbyte.storage.gcs.envs" . }} -{{- end }} - -{{/* MINIO */}} -{{- if eq $storageProvider "minio" }} -{{- include "airbyte.storage.minio.envs" . }} -{{- end }} - -{{/* AZURE */}} -{{- if eq $storageProvider "azure" }} -{{- include "airbyte.storage.azure.envs" . }} -{{- end }} - -{{/* LOCAl */}} -{{- if eq $storageProvider "local" }} -- name: LOCAL_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: LOCAL_ROOT -{{- end }} -{{- end }} - -{{/* -Returns storage config variables. -*/}} -{{- define "airbyte.storage.configVars" }} -{{- end }} - -{{/* -Returns storage config secrets. -*/}} -{{- define "airbyte.storage.secrets" }} -{{- $storageProvider := (include "airbyte.storage.provider" .) }} -{{/* S3 */}} -{{- if eq $storageProvider "s3" }} -{{- include "airbyte.storage.s3.secrets" . }} -{{- end }} - -{{/* GCS */}} -{{- if eq $storageProvider "gcs" }} -{{- include "airbyte.storage.gcs.secrets" . }} -{{- end }} - -{{/* MINIO */}} -{{- if eq $storageProvider "minio" }} -{{- include "airbyte.storage.minio.secrets" . }} -{{- end }} - -{{/* AZURE */}} -{{- if eq $storageProvider "azure" }} -{{- include "airbyte.storage.azure.secrets" . }} -{{- end }} -{{- end }} diff --git a/charts/v2/airbyte/templates/_temporal.tpl b/charts/v2/airbyte/templates/_temporal.tpl deleted file mode 100644 index c4e213c9e37..00000000000 --- a/charts/v2/airbyte/templates/_temporal.tpl +++ /dev/null @@ -1,58 +0,0 @@ -{{/* -Temporal Configuration -*/}} - -{{- define "airbyte.temporal.database.host.env" }} -- name: POSTGRES_SEEDS - valueFrom: - {{- if .Values.global.database.hostSecretKey }} - secretKeyRef: - name: {{ include "airbyte.database.secretName" . }} - key: {{ .Values.global.database.hostSecretKey }} - {{- else }} - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: DATABASE_HOST - {{- end }} -{{- end }} - -{{- define "airbyte.temporal.database.port.env" }} -- name: DB_PORT - valueFrom: - {{- if .Values.global.database.portSecretKey }} - secretKeyRef: - name: {{ include "airbyte.database.secretName" . }} - key: {{ .Values.global.database.portSecretKey }} - {{- else }} - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: DATABASE_PORT - {{- end }} -{{- end }} - -{{- define "airbyte.temporal.database.user.env" }} -- name: POSTGRES_USER - valueFrom: - secretKeyRef: - {{- if .Values.global.database.userSecretKey }} - name: {{ .Values.global.database.secretName }} - {{- else }} - name: {{ .Release.Name }}-airbyte-secrets - {{- end }} - key: {{ include "airbyte.database.userSecretKey" .}} -{{- end }} - -{{- define "airbyte.temporal.database.password.env" }} -- name: POSTGRES_PWD - valueFrom: - secretKeyRef: - name: {{ include "airbyte.database.secretName" . }} - key: {{ include "airbyte.database.passwordSecretKey" . }} -{{- end }} - -{{- define "airbyte.temporal.database.envs" }} -{{ include "airbyte.temporal.database.host.env" . }} -{{ include "airbyte.temporal.database.port.env" . }} -{{ include "airbyte.temporal.database.user.env" . }} -{{ include "airbyte.temporal.database.password.env" . 
}} -{{- end }} diff --git a/charts/v2/airbyte/templates/airbyte-bootloader/pod.yaml b/charts/v2/airbyte/templates/airbyte-bootloader/pod.yaml index 2bf11f56cc4..048dafaa9c5 100644 --- a/charts/v2/airbyte/templates/airbyte-bootloader/pod.yaml +++ b/charts/v2/airbyte/templates/airbyte-bootloader/pod.yaml @@ -44,45 +44,14 @@ spec: image: {{ include "imageUrl" (list .Values.airbyteBootloader.image $) }} imagePullPolicy: "{{ .Values.airbyteBootloader.image.pullPolicy }}" env: - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Values.global.configMapName | default (printf "%s-airbyte-env" .Release.Name) }} - key: AIRBYTE_VERSION - - name: RUN_DATABASE_MIGRATION_ON_STARTUP - valueFrom: - configMapKeyRef: - name: {{ .Values.global.configMapName | default (printf "%s-airbyte-env" .Release.Name) }} - key: RUN_DATABASE_MIGRATION_ON_STARTUP - - # Airbyte auth secret keys and values - {{- if and (eq .Values.global.edition "community") .Values.global.auth.enabled }} - - name: AB_AUTH_SECRET_CREATION_ENABLED - value: "true" - - name: AB_KUBERNETES_SECRET_NAME - value: {{ .Values.global.auth.secretName | default "airbyte-auth-secrets" | quote }} - - name: AB_INSTANCE_ADMIN_PASSWORD_SECRET_KEY - value: {{ .Values.global.auth.instanceAdmin.passwordSecretKey | default "instance-admin-password" | quote }} - - name: AB_INSTANCE_ADMIN_CLIENT_ID_SECRET_KEY - value: {{ .Values.global.auth.instanceAdmin.clientIdSecretKey | default "instance-admin-client-id" | quote }} - - name: AB_INSTANCE_ADMIN_CLIENT_SECRET_SECRET_KEY - value: {{ .Values.global.auth.instanceAdmin.clientSecretSecretKey | default "instance-admin-client-secret" | quote }} - - name: AB_JWT_SIGNATURE_SECRET_KEY - value: {{ .Values.global.auth.jwtSignatureSecretKey | default "jwt-signature-secret" | quote }} - - name: AB_INSTANCE_ADMIN_PASSWORD - value: {{ .Values.global.auth.instanceAdmin.password | quote }} - - name: AB_INSTANCE_ADMIN_CLIENT_ID - value: {{ .Values.global.auth.instanceAdmin.clientId | quote }} - - name: AB_INSTANCE_ADMIN_CLIENT_SECRET - value: {{ .Values.global.auth.instanceAdmin.clientSecret | quote }} - - name: AB_JWT_SIGNATURE_SECRET - value: {{ .Values.global.auth.jwtSignatureSecret | quote }} - {{- end }} - + {{- include "airbyte.common.version.env" . | nindent 8 }} {{- include "airbyte.database.envs" . | nindent 8 }} + {{- include "airbyte.database.migrations.runAtStartup.env" . | nindent 8 }} + {{- if (eq .Values.global.edition "community") }} + {{- include "airbyte.auth.bootstrap.envs" . 
| nindent 8 }} {{- end }} + # Values from secret {{- if .Values.airbyteBootloader.secrets }} {{- range $k, $v := .Values.airbyteBootloader.secrets }} diff --git a/charts/v2/airbyte/templates/airbyte-connector-builder-server/deployment.yaml b/charts/v2/airbyte/templates/airbyte-connector-builder-server/deployment.yaml index edc3a77d52e..a8672696b26 100644 --- a/charts/v2/airbyte/templates/airbyte-connector-builder-server/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-connector-builder-server/deployment.yaml @@ -59,42 +59,13 @@ spec: - name: JAVA_TOOL_OPTIONS value: "-Xdebug -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:{{ .Values.connectorBuilderServer.debug.remoteDebugPort }},server=y,suspend=n" {{- end}} - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TRACKING_STRATEGY + + {{- include "airbyte.common.envs" . | nindent 8 }} + {{- include "airbyte.metrics.envs" . | nindent 8 }} + {{- include "airbyte.tracking.envs" . | nindent 8 }} + + {{- if (eq .Values.global.edition "enterprise") }} + {{- include "airbyte.enterprise.envs" . 
| nindent 8 }} {{- end }} # Values from secret diff --git a/charts/v2/airbyte/templates/airbyte-connector-rollout-worker/deployment.yaml b/charts/v2/airbyte/templates/airbyte-connector-rollout-worker/deployment.yaml index 581a27d66ee..b40e7cb848b 100644 --- a/charts/v2/airbyte/templates/airbyte-connector-rollout-worker/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-connector-rollout-worker/deployment.yaml @@ -63,78 +63,16 @@ spec: - name: JAVA_TOOL_OPTIONS value: "-Xdebug -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:{{ .Values.connectorRolloutWorker.debug.remoteDebugPort }},server=y,suspend=n" {{- end }} - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: INTERNAL_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: INTERNAL_API_HOST - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TRACKING_STRATEGY - - name: WEBAPP_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WEBAPP_URL - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_HOST - - name: TEMPORAL_WORKER_PORTS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_WORKER_PORTS - - name: LOG_LEVEL - value: "{{ .Values.connectorRolloutWorker.log.level }}" - {{- end }} - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - - name: AIRBYTE_API_AUTH_HEADER_NAME - value: "X-Airbyte-Auth" - - name: AIRBYTE_API_AUTH_HEADER_VALUE - value: "Internal worker" - {{- else if and (eq .Values.global.deploymentMode "oss") .Values.global.auth.enabled }} - # Self-Managed Enterprise and Community w/ auth enabled use the same auth header, just - # splitting into two separate blocks for readability. - - name: AIRBYTE_API_AUTH_HEADER_NAME - value: "X-Airbyte-Auth" + - name: AIRBYTE_API_AUTH_HEADER_VALUE value: "Internal worker" - {{- end }} + + {{- include "airbyte.common.envs" . | nindent 8 }} + {{- include "airbyte.logging.envs" . | nindent 8 }} + {{- include "airbyte.metrics.envs" . | nindent 8 }} + {{- include "airbyte.tracking.envs" . | nindent 8 }} + {{- include "airbyte.temporal.envs" . | nindent 8 }} + {{- include "airbyte.temporal.worker.envs" . 
| nindent 8 }} # Values from secret {{- if .Values.connectorRolloutWorker.secrets }} diff --git a/charts/v2/airbyte/templates/airbyte-cron/deployment.yaml b/charts/v2/airbyte/templates/airbyte-cron/deployment.yaml index 67c7e8cacf3..0c367dc2bd3 100644 --- a/charts/v2/airbyte/templates/airbyte-cron/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-cron/deployment.yaml @@ -1,3 +1,4 @@ +{{- if .Values.cron.enabled }} apiVersion: apps/v1 kind: Deployment metadata: @@ -56,89 +57,20 @@ spec: image: {{ include "imageUrl" (list .Values.cron.image $) }} imagePullPolicy: "{{ .Values.cron.image.pullPolicy }}" env: - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: MICRONAUT_ENVIRONMENTS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CRON_MICRONAUT_ENVIRONMENTS - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_HOST - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TRACKING_STRATEGY - - name: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - - name: WORKLOAD_API_BEARER_TOKEN - valueFrom: - secretKeyRef: - name: {{ index .Values.cron "workloadApi" "bearerTokenSecretName" | default (printf "%s-airbyte-secrets" .Release.Name ) }} - key: {{ index .Values.cron "workloadApi" "bearerTokenSecretKey" | default "WORKLOAD_API_BEARER_TOKEN" }} - - name: WORKLOAD_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKLOAD_API_HOST - - name: WORKSPACE_DOCKER_MOUNT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKSPACE_DOCKER_MOUNT - - name: WORKSPACE_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKSPACE_ROOT - # Database + {{- include "airbyte.common.envs" . | nindent 10 }} {{- include "airbyte.database.envs" . | nindent 10 }} + {{- include "airbyte.database.migrations.envs" . | nindent 10 }} + {{- include "airbyte.metrics.envs" . | nindent 10 }} + {{- include "airbyte.micronaut.envs" . | nindent 10 }} + {{- include "airbyte.temporal.envs" . | nindent 10 }} + {{- include "airbyte.tracking.envs" . | nindent 10 }} + {{- include "airbyte.worker.envs" . | nindent 10 }} + {{- if (eq .Values.global.edition "enterprise") }} + {{- include "airbyte.enterprise.envs" . 
| nindent 10 }} {{- end }} - # Values from secret {{- if .Values.cron.secrets }} {{- range $k, $v := .Values.cron.secrets }} @@ -185,3 +117,4 @@ spec: {{- if .Values.cron.extraVolumes }} volumes: {{ toYaml .Values.cron.extraVolumes | nindent 6 }} {{- end }} +{{- end }} diff --git a/charts/v2/airbyte/templates/airbyte-featureflag-server/deployment.yaml b/charts/v2/airbyte/templates/airbyte-featureflag-server/deployment.yaml index 5ea90e3ec68..5d9497125f2 100644 --- a/charts/v2/airbyte/templates/airbyte-featureflag-server/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-featureflag-server/deployment.yaml @@ -63,34 +63,9 @@ spec: - name: JAVA_TOOL_OPTIONS value: "-Xdebug -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:{{ .Values.featureflagServer.debug.remoteDebugPort }},server=y,suspend=n" {{- end }} - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_PORT - {{- end }} + {{- include "airbyte.common.envs" . | nindent 8 }} + {{- include "airbyte.metrics.envs" . | nindent 8 }} # Values from env {{- if or .Values.featureflagServer.env_vars .Values.global.env_vars }} diff --git a/charts/v2/airbyte/templates/airbyte-keycloak-setup/job.yaml b/charts/v2/airbyte/templates/airbyte-keycloak-setup/job.yaml index 636f81d0a45..3b5a3295328 100644 --- a/charts/v2/airbyte/templates/airbyte-keycloak-setup/job.yaml +++ b/charts/v2/airbyte/templates/airbyte-keycloak-setup/job.yaml @@ -42,11 +42,7 @@ spec: image: {{ include "imageUrl" (list .Values.keycloakSetup.initContainers.keycloakReadinessCheck.image $) }} command: [ "sh", "-c", "until curl --output /dev/null --head --fail http://${KEYCLOAK_INTERNAL_HOST}/auth/health/ready; do sleep 1; done;" ] env: - - name: KEYCLOAK_INTERNAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Values.global.configMapName | default (printf "%s-airbyte-env" .Release.Name) }} - key: KEYCLOAK_INTERNAL_HOST + {{- include "airbyte.keycloak.admin.client.internalHost.env" . 
| nindent 12 }} securityContext: {{- toYaml .Values.keycloakSetup.initContainerSecurityContext | nindent 12 }} {{- if .Values.keycloakSetup.extraInitContainers }} @@ -55,33 +51,23 @@ spec: containers: - name: airbyte-keycloak-setup-container image: {{ include "imageUrl" (list .Values.keycloakSetup.image $) }} - imagePullPolicy: "{{ .Values.keycloakSetup.image.pullPolic }}" + imagePullPolicy: "{{ .Values.keycloakSetup.image.pullPolicy }}" env: - - name: AIRBYTE_URL - valueFrom: - configMapKeyRef: - name: {{ .Values.global.configMapName | default (printf "%s-airbyte-env" .Release.Name) }} - key: AIRBYTE_URL - - name: KEYCLOAK_ADMIN_USER - valueFrom: - secretKeyRef: - name: {{ .Values.global.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: KEYCLOAK_ADMIN_USER - - name: KEYCLOAK_ADMIN_PASSWORD - valueFrom: - secretKeyRef: - name: {{ .Values.global.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: KEYCLOAK_ADMIN_PASSWORD - - name: KEYCLOAK_INTERNAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Values.global.configMapName | default (printf "%s-airbyte-env" .Release.Name) }} - key: KEYCLOAK_INTERNAL_HOST + {{- if .Values.keycloakSetup.debug.enabled }} + - name: JAVA_TOOL_OPTIONS + value: "-Xdebug -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:{{ .Values.keycloakSetup.debug.remoteDebugPort }},server=y,suspend=y" + {{- end}} + {{- include "airbyte.common.envs" . | nindent 12 }} {{- include "airbyte.database.envs" . | nindent 12 }} - {{- include "airbyte.enterprise.instanceAdmin" . | nindent 12 }} - {{- include "airbyte.enterprise.identityProvider" . | nindent 12 }} - {{- include "airbyte.keycloak.database.envs" . | nindent 12 }} + {{- include "airbyte.keycloak.envs" . | nindent 12 }} + {{- include "airbyte.keycloak.admin.client.envs" . | nindent 12 }} + {{- include "airbyte.keycloak.admin.user.envs" . | nindent 12 }} + + {{- if (eq .Values.global.edition "enterprise") }} + {{- include "airbyte.enterprise.envs" . | nindent 12 }} + {{- include "airbyte.auth.instanceAdmin.enterprise.envs" . 
| nindent 12 }} + {{- end}} # Values from secret {{- if .Values.keycloakSetup.secrets }} @@ -107,6 +93,13 @@ spec: {{- toYaml .Values.keycloakSetup.extraEnv | nindent 12 }} {{- end }} + {{- if .Values.keycloakSetup.debug.enabled }} + ports: + - name: debug + containerPort: {{ .Values.keycloakSetup.debug.remoteDebugPort }} + protocol: TCP + {{- end}} + {{- if .Values.keycloakSetup.resources }} resources: {{- toYaml .Values.keycloakSetup.resources | nindent 10 }} {{- end }} @@ -118,10 +111,10 @@ spec: readOnly: true {{- end }} {{- if .Values.keycloakSetup.extraVolumeMount }} - {{- toYaml .Values.keycloakSetup.extraVolumeMounts | nindent 10 }} + {{- toYaml .Values.keycloakSetup.extraVolumeMounts | nindent 12 }} {{- end }} {{- if .Values.keycloakSetup.containerSecurityContext }} - securityContext: {{- toYaml .Values.keycloakSetup.containerSecurityContext | nindent 10 }} + securityContext: {{- toYaml .Values.keycloakSetup.containerSecurityContext | nindent 12 }} {{- end }} {{- if .Values.keycloakSetup.extraContainers }} {{ toYaml .Values.keycloakSetup.extraContainers | nindent 8 }} @@ -129,7 +122,7 @@ spec: {{- if .Values.global.extraContainers }} {{ toYaml .Values.global.extraContainers | nindent 8 }} {{- end }} - securityContext: {{- toYaml .Values.keycloakSetup.podSecurityContext | nindent 6 }} + securityContext: {{- toYaml .Values.keycloakSetup.podSecurityContext | nindent 8 }} volumes: {{- if .Values.global.airbyteYml }} - name: airbyte-yml-volume diff --git a/charts/v2/airbyte/templates/airbyte-keycloak/statefulset.yaml b/charts/v2/airbyte/templates/airbyte-keycloak/statefulset.yaml index 46d120aa8f7..d2b4acbdfca 100644 --- a/charts/v2/airbyte/templates/airbyte-keycloak/statefulset.yaml +++ b/charts/v2/airbyte/templates/airbyte-keycloak/statefulset.yaml @@ -75,29 +75,9 @@ spec: image: {{ include "imageUrl" (list .Values.keycloak.image $)}} imagePullPolicy: {{ .Values.keycloak.image.pullPolicy }} env: - {{- if eq .Values.global.deploymentMode "oss" }} - - name: KEYCLOAK_ADMIN_USER - valueFrom: - secretKeyRef: - name: {{ .Values.global.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: KEYCLOAK_ADMIN_USER - - name: KEYCLOAK_ADMIN_PASSWORD - valueFrom: - secretKeyRef: - name: {{ .Values.global.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: KEYCLOAK_ADMIN_PASSWORD - - name: KEYCLOAK_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: KEYCLOAK_PORT - - name: JAVA_OPTS_APPEND - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: KEYCLOAK_JAVA_OPTS_APPEND + {{- include "airbyte.keycloak.envs" . | nindent 12 }} + {{- include "airbyte.keycloak.admin.user.envs" . | nindent 12 }} {{- include "airbyte.keycloak.database.envs" . 
| nindent 12 }} - {{- end }} # end if oss {{- if .Values.keycloak.extraEnv }} {{ .Values.keycloak.extraEnv | toYaml | nindent 12 }} diff --git a/charts/v2/airbyte/templates/airbyte-metrics/deployment.yaml b/charts/v2/airbyte/templates/airbyte-metrics/deployment.yaml index 1b7d14fe3d1..f9616d86d14 100644 --- a/charts/v2/airbyte/templates/airbyte-metrics/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-metrics/deployment.yaml @@ -50,31 +50,11 @@ spec: image: {{ include "imageUrl" (list .Values.metrics.image $) }} imagePullPolicy: "{{ .Values.metrics.image.pullPolicy }}" env: - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: METRIC_CLIENT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: METRIC_CLIENT - - name: OTEL_COLLECTOR_ENDPOINT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: OTEL_COLLECTOR_ENDPOINT - + {{- include "airbyte.common.envs" . | nindent 8 }} {{- include "airbyte.database.envs" . | nindent 8 }} + {{- include "airbyte.database.migrations.envs" . | nindent 8 }} + {{- include "airbyte.metrics.envs" . | nindent 8 }} - {{- end }} # Values from secret {{- if .Values.metrics.secrets }} {{- range $k, $v := .Values.metrics.secrets }} diff --git a/charts/v2/airbyte/templates/airbyte-server/deployment.yaml b/charts/v2/airbyte/templates/airbyte-server/deployment.yaml index 7d05518c591..2f71d5fb151 100644 --- a/charts/v2/airbyte/templates/airbyte-server/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-server/deployment.yaml @@ -63,278 +63,37 @@ spec: - name: JAVA_TOOL_OPTIONS value: "-Xdebug -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:{{ .Values.server.debug.remoteDebugPort }},server=y,suspend=n" {{- end}} - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - - name: API_AUTHORIZATION_ENABLED - value: "true" - {{- else if and (eq .Values.global.deploymentMode "oss") .Values.global.auth.enabled }} - # Self-Managed Enterprise should always have API_AUTHORIZATION_ENABLED set to true, even - # if global.auth.enabled is not set to true. This can be simplified in the future, once - # globa.auth.enabled is changed to always default to true across all editions of Airbyte. - - name: API_AUTHORIZATION_ENABLED - value: "true" - {{- end }} - {{- if eq .Values.global.deploymentMode "oss" }} - {{- include "airbyte.logging.envs" . 
| nindent 8 }} - - name: AIRBYTE_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_API_HOST - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: AIRBYTE_EDITION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_EDITION - - name: AIRBYTE_URL - valueFrom: - configMapKeyRef: - name: {{ .Values.global.configMapName | default (printf "%s-airbyte-env" .Release.Name) }} - key: AIRBYTE_URL - - name: CONFIG_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONFIG_ROOT - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: MICRONAUT_ENVIRONMENTS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SERVER_MICRONAUT_ENVIRONMENTS - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TRACKING_STRATEGY - - name: WORKER_ENVIRONMENT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKER_ENVIRONMENT - - name: WORKSPACE_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKSPACE_ROOT - - name: WEBAPP_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WEBAPP_URL - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_HOST - - name: JOB_MAIN_CONTAINER_CPU_REQUEST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_REQUEST - - name: JOB_MAIN_CONTAINER_CPU_LIMIT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_LIMIT - - name: JOB_MAIN_CONTAINER_MEMORY_REQUEST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_REQUEST - - name: JOB_MAIN_CONTAINER_MEMORY_LIMIT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_LIMIT - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: KEYCLOAK_INTERNAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: KEYCLOAK_INTERNAL_HOST - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - - name: KEYCLOAK_ADMIN_USER - valueFrom: - secretKeyRef: - name: {{ .Values.global.secretName | default (printf "%s-airbyte-secrets" .Release.Name) }} - key: KEYCLOAK_ADMIN_USER - - name: KEYCLOAK_ADMIN_PASSWORD - valueFrom: - secretKeyRef: - name: {{ .Values.global.secretName | default (printf 
"%s-airbyte-secrets" .Release.Name) }} - key: KEYCLOAK_ADMIN_PASSWORD - {{- end }} - - name: CONNECTOR_BUILDER_SERVER_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONNECTOR_BUILDER_SERVER_API_HOST - - name: AIRBYTE_API_AUTH_HEADER_NAME - value: "X-Airbyte-Auth" + - name: AIRBYTE_API_AUTH_HEADER_VALUE value: "Internal server" - - name: ENTERPRISE_SOURCE_STUBS_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: ENTERPRISE_SOURCE_STUBS_URL - - {{- if and (eq .Values.global.edition "community") .Values.global.auth.enabled }} - # Values for Airbyte auth configurations, secrets, and credentials in Community edition - - name: AB_INSTANCE_ADMIN_PASSWORD - valueFrom: - secretKeyRef: - name: {{ .Values.global.auth.secretName | default "airbyte-auth-secrets" | quote }} - key: {{ .Values.global.auth.instanceAdmin.passwordSecretKey | default "instance-admin-password" | quote }} - - name: AB_INSTANCE_ADMIN_CLIENT_ID - valueFrom: - secretKeyRef: - name: {{ .Values.global.auth.secretName | default "airbyte-auth-secrets" | quote }} - key: {{ .Values.global.auth.instanceAdmin.clientIdSecretKey | default "instance-admin-client-id" | quote }} - - name: AB_INSTANCE_ADMIN_CLIENT_SECRET - valueFrom: - secretKeyRef: - name: {{ .Values.global.auth.secretName | default "airbyte-auth-secrets" | quote }} - key: {{ .Values.global.auth.instanceAdmin.clientSecretSecretKey | default "instance-admin-client-secret" | quote }} - - name: AB_JWT_SIGNATURE_SECRET - valueFrom: - secretKeyRef: - name: {{ .Values.global.auth.secretName | default "airbyte-auth-secrets" | quote }} - key: {{ .Values.global.auth.jwtSignatureSecretKey | default "jwt-signature-secret" | quote }} - - name: AB_COOKIE_SECURE - value: {{ .Values.global.auth.cookieSecureSetting | default "true" | quote }} - - name: AB_COOKIE_SAME_SITE - value: {{ .Values.global.auth.cookieSameSiteSetting | default "Strict" | quote }} - {{- end }} - - # Secrets Manager - - name: SECRET_PERSISTENCE - value: {{ include "airbyte.secretPersistence" . 
}} - # Values for AwsSecretsManager - {{- if eq ((((.Values.global).secretsManager).awsSecretManager).authenticationType) "credentials" }} - - name: AWS_SECRET_MANAGER_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.awsSecretManagerAccessKeyIdSecretKey" .Values.global.secretsManager.awsSecretManager.accessKeyIdSecretKey }} - - name: AWS_SECRET_MANAGER_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.awsSecretManagerSecretAccessKeySecretKey" .Values.global.secretsManager.awsSecretManager.secretAccessKeySecretKey }} - {{- end }} - {{- if ((((.Values.global).secretsManager).awsSecretManager).region) }} - - name: AWS_SECRET_MANAGER_REGION - value: {{ (((.Values.global).secretsManager).awsSecretManager).region }} - {{- end }} - {{- if ((((.Values.global).secretsManager).awsSecretManager).tags) }} - - name: AWS_SECRET_MANAGER_SECRET_TAGS - value: {{ include "airbyte.tagsToString" .Values.global.secretsManager.awsSecretManager.tags }} - {{- end }} - {{- if ((((.Values.global).secretsManager).awsSecretManager).kms) }} - - name: AWS_KMS_KEY_ARN - value: {{ ((((.Values.global).secretsManager).awsSecretManager).kms) | default "" }} - {{- end }} - - # Values for Azure Key Vault - {{- if (((.Values.global).secretsManager).azureKeyVault) }} - - - name: AB_AZURE_KEY_VAULT_VAULT_URL - value: {{ (((.Values.global).secretsManager).azureKeyVault).vaultUrl }} - - - name: AB_AZURE_KEY_VAULT_TENANT_ID - value: {{ (((.Values.global).secretsManager).azureKeyVault).tenantId }} - - - name: AB_AZURE_KEY_VAULT_CLIENT_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.azureKeyVaultClientIdSecretKey" .Values.global.secretsManager.azureKeyVault.clientIdSecretKey }} - - name: AB_AZURE_KEY_VAULT_CLIENT_SECRET - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.azureKeyVaultClientSecretSecretKey" .Values.global.secretsManager.azureKeyVault.clientSecretSecretKey }} - - {{- end }} - - {{- if ((((.Values.global).secretsManager).azureKeyVault).tags) }} - - name: AB_AZURE_KEY_VAULT_TAGS - value: {{ include "airbyte.tagsToString" .Values.global.secretsManager.azureKeyVault.tags }} - {{- end }} - - # Values for googleSecretManager secrets - {{- if (((.Values.global).secretsManager).googleSecretManager) }} - - name: SECRET_STORE_GCP_PROJECT_ID - value: {{ .Values.global.secretsManager.googleSecretManager.projectId }} - - name: SECRET_STORE_GCP_CREDENTIALS - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.googleSecretManagerCredentialsSecretKey" .Values.global.secretsManager.googleSecretManager.credentialsSecretKey }} - {{- end }} - - # Values for vault secrets - {{- if (((.Values.global).secretsManager).vault) }} - - name: VAULT_ADDRESS - value: {{ (((.Values.global).secretsManager).vault).address }} - - name: VAULT_PREFIX - value: {{ (((.Values.global).secretsManager).vault).prefix }} - - name: VAULT_AUTH_TOKEN - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include 
"airbyte.vaultAuthTokenSecretKey" .Values.global.secretsManager.vault.authTokenSecretKey }} - {{- end }} - - # Storage - {{- include "airbyte.storage.envs" . | nindent 8 }} + {{- include "airbyte.common.envs" . | nindent 8 }} + {{- include "airbyte.auth.security.envs" . | nindent 8 }} + {{- include "airbyte.connector.envs" . | nindent 8 }} {{- include "airbyte.database.envs" . | nindent 8 }} - {{- include "airbyte.enterprise.instanceAdmin" . | nindent 8 }} - {{- include "airbyte.enterprise.identityProvider" . | nindent 8 }} - {{- include "airbyte.enterprise.license" . | nindent 8 }} + {{- include "airbyte.database.migrations.envs" . | nindent 8 }} + {{- include "airbyte.logging.envs" . | nindent 8 }} + {{- include "airbyte.metrics.envs" . | nindent 8 }} + {{- include "airbyte.micronaut.envs" . | nindent 8 }} + {{- include "airbyte.storage.envs" . | nindent 8 }} + {{- include "airbyte.secretsManager.envs" . | nindent 8 }} + {{- include "airbyte.temporal.envs" . | nindent 8 }} + {{- include "airbyte.tracking.envs" . | nindent 8 }} + {{- include "airbyte.worker.envs" . | nindent 8 }} + {{- include "airbyte.workloads.resources.envs" . | nindent 8 }} + + {{- if or (eq .Values.global.edition "enterprise") (eq .Values.global.edition "pro") }} + {{- include "airbyte.auth.instanceAdmin.enterprise.envs" . | nindent 8 }} + {{- include "airbyte.auth.identityProvider.envs" . | nindent 8 }} + {{- include "airbyte.enterprise.envs" . | nindent 8 }} + {{- include "airbyte.keycloak.envs" . | nindent 8 }} + {{- include "airbyte.keycloak.admin.client.envs" . | nindent 8 }} + {{- include "airbyte.keycloak.admin.user.envs" . | nindent 8 }} + {{- else }} + {{- include "airbyte.auth.envs" . | nindent 8 }} + {{- include "airbyte.auth.jwt.envs" . | nindent 8 }} + {{- end}} - {{- end }} - # Values from secret {{- if .Values.server.secrets }} {{- range $k, $v := .Values.server.secrets }} diff --git a/charts/v2/airbyte/templates/airbyte-temporal/deployment.yaml b/charts/v2/airbyte/templates/airbyte-temporal/deployment.yaml index 7566d702398..f251bc428f8 100644 --- a/charts/v2/airbyte/templates/airbyte-temporal/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-temporal/deployment.yaml @@ -56,29 +56,8 @@ spec: image: {{ include "imageUrl" (list .Values.temporal.image $) }} imagePullPolicy: {{ .Values.temporal.image.pullPolicy }} env: - {{- if eq .Values.global.deploymentMode "oss"}} - - name: AUTO_SETUP - value: "true" - - name: DB # The DB engine to use - value: "postgresql" - - name: DYNAMIC_CONFIG_FILE_PATH - value: "config/dynamicconfig/development.yaml" - {{- include "airbyte.temporal.database.envs" . | nindent 10 }} - - {{- end }} - - {{- if eq .Values.global.database.type "external" }} - # Assume an external database requires SSL. - - name: POSTGRES_TLS_ENABLED - value: "true" - - name: POSTGRES_TLS_DISABLE_HOST_VERIFICATION - value: "true" - - name: SQL_TLS_ENABLED - value: "true" - - name: SQL_TLS_DISABLE_HOST_VERIFICATION - value: "true" - {{- end }} + {{- include "airbyte.temporal.envs" . 
| nindent 10 }} {{- if .Values.temporal.extraEnv }} {{ .Values.temporal.extraEnv | toYaml | nindent 10 }} diff --git a/charts/v2/airbyte/templates/airbyte-webapp/deployment.yaml b/charts/v2/airbyte/templates/airbyte-webapp/deployment.yaml index 8f3850fb5a1..91d132368e8 100644 --- a/charts/v2/airbyte/templates/airbyte-webapp/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-webapp/deployment.yaml @@ -56,43 +56,12 @@ spec: image: {{ include "imageUrl" (list .Values.webapp.image $) }} imagePullPolicy: "{{ .Values.webapp.image.pullPolicy }}" env: - {{- if eq .Values.global.deploymentMode "oss" }} - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TRACKING_STRATEGY - - name: AIRBYTE_SERVER_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_SERVER_HOST - - name: KEYCLOAK_INTERNAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: KEYCLOAK_INTERNAL_HOST - - name: CONNECTOR_BUILDER_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONNECTOR_BUILDER_API_HOST - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: API_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: API_URL - - name: CONNECTOR_BUILDER_API_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONNECTOR_BUILDER_API_URL - {{- end }} + + {{ include "airbyte.common.envs" . | nindent 8 }} + {{ include "airbyte.keycloak.admin.client.internalHost.env" . | nindent 8 }} + {{ include "airbyte.tracking.envs" . | nindent 8 }} + {{ include "airbyte.webapp.envs" . | nindent 8 }} + # Values from secret {{- if .Values.webapp.secrets }} {{- range $k, $v := .Values.webapp.secrets }} @@ -105,8 +74,8 @@ spec: {{- end }} # Values from env - {{- if or .Values.webapp.envVars .Values.global.env_vars }} - {{- range $k, $v := mergeOverwrite .Values.webapp.envVars .Values.global.env_vars }} + {{- if or .Values.webapp.env_vars .Values.global.env_vars }} + {{- range $k, $v := mergeOverwrite .Values.webapp.env_vars .Values.global.env_vars }} - name: {{ $k }} value: {{ $v | quote }} {{- end }} diff --git a/charts/v2/airbyte/templates/airbyte-worker/deployment.yaml b/charts/v2/airbyte/templates/airbyte-worker/deployment.yaml index 63cf5574764..5bd52492a18 100644 --- a/charts/v2/airbyte/templates/airbyte-worker/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-worker/deployment.yaml @@ -60,317 +60,27 @@ spec: - name: JAVA_TOOL_OPTIONS value: "-Xdebug -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:{{ .Values.worker.debug.remoteDebugPort }},server=y,suspend=n" {{- end}} - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: CONFIG_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONFIG_ROOT - {{- if eq (lower (default "" .Values.global.storage.type)) "gcs" }} - - name: CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH - value: "/secrets/gcs-log-creds" - - name: CONTAINER_ORCHESTRATOR_SECRET_NAME - value: {{ include "airbyte.secretStoreName" .Values.global.storage.storageSecretName }} - {{- end }} - {{- include "airbyte.logging.envs" . 
| nindent 8 }} - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TRACKING_STRATEGY - - name: WORKSPACE_DOCKER_MOUNT - value: workspace - - name: WORKSPACE_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKSPACE_ROOT - - name: LOCAL_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: LOCAL_ROOT - - name: WEBAPP_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WEBAPP_URL - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_HOST - - name: TEMPORAL_WORKER_PORTS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_WORKER_PORTS - - name: JOB_KUBE_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: JOB_KUBE_SERVICEACCOUNT - value: {{ .Values.global.serviceAccountName }} - {{- if $.Values.global.jobs.kube.annotations }} - - name: JOB_KUBE_ANNOTATIONS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_ANNOTATIONS - {{- end }} - {{- if $.Values.global.jobs.kube.labels }} - - name: JOB_KUBE_LABELS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_LABELS - {{- end }} - {{- if $.Values.global.jobs.kube.nodeSelector }} - - name: JOB_KUBE_NODE_SELECTORS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_NODE_SELECTORS - {{- end }} - {{- if $.Values.global.jobs.kube.tolerations }} - - name: JOB_KUBE_TOLERATIONS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_TOLERATIONS - {{- end }} - - name: JOB_KUBE_BUSYBOX_IMAGE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_BUSYBOX_IMAGE - - name: JOB_KUBE_CURL_IMAGE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_CURL_IMAGE - {{- if $.Values.global.jobs.kube.main_container_image_pull_secret }} - - name: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET - {{- end }} - - name: JOB_MAIN_CONTAINER_CPU_REQUEST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_REQUEST - - name: JOB_MAIN_CONTAINER_CPU_LIMIT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_LIMIT - - name: JOB_MAIN_CONTAINER_MEMORY_REQUEST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_REQUEST - - name: JOB_MAIN_CONTAINER_MEMORY_LIMIT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_LIMIT - - name: INTERNAL_API_HOST - valueFrom: - 
configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: INTERNAL_API_HOST - - name: WORKLOAD_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKLOAD_API_HOST - - name: WORKLOAD_API_BEARER_TOKEN - valueFrom: - secretKeyRef: - name: {{ index .Values.worker "workloadApi" "bearerTokenSecretName" | default (printf "%s-airbyte-secrets" .Release.Name ) }} - key: {{ index .Values.worker "workloadApi" "bearerTokenSecretKey" | default "WORKLOAD_API_BEARER_TOKEN" }} - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: METRIC_CLIENT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: METRIC_CLIENT - - name: OTEL_COLLECTOR_ENDPOINT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: OTEL_COLLECTOR_ENDPOINT - - name: ACTIVITY_MAX_ATTEMPT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: ACTIVITY_MAX_ATTEMPT - - name: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS - - name: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS - - name: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - - name: SHOULD_RUN_NOTIFY_WORKFLOWS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SHOULD_RUN_NOTIFY_WORKFLOWS - - name: MICRONAUT_ENVIRONMENTS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKERS_MICRONAUT_ENVIRONMENTS - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - - name: AIRBYTE_API_AUTH_HEADER_NAME - value: "X-Airbyte-Auth" - - name: AIRBYTE_API_AUTH_HEADER_VALUE - value: "Internal worker" - {{- else if and (eq .Values.global.deploymentMode "oss") .Values.global.auth.enabled }} - # Self-Managed Enterprise and Community w/ auth enabled use the same auth header, just - # splitting into two separate blocks for readability. - - name: AIRBYTE_API_AUTH_HEADER_NAME - value: "X-Airbyte-Auth" + - name: AIRBYTE_API_AUTH_HEADER_VALUE value: "Internal worker" - {{- end }} - # SECRETS MANAGER - - name: SECRET_PERSISTENCE - value: {{ include "airbyte.secretPersistence" . 
}} - # Values for awsSecretManager - {{- if eq ((((.Values.global).secretsManager).awsSecretManager).authenticationType) "credentials" }} - - name: AWS_SECRET_MANAGER_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.awsSecretManagerAccessKeyIdSecretKey" .Values.global.secretsManager.awsSecretManager.accessKeyIdSecretKey }} - - name: AWS_SECRET_MANAGER_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.awsSecretManagerSecretAccessKeySecretKey" .Values.global.secretsManager.awsSecretManager.secretAccessKeySecretKey }} - {{- end }} - {{- if ((((.Values.global).secretsManager).awsSecretManager).region) }} - - name: AWS_SECRET_MANAGER_REGION - value: {{ (((.Values.global).secretsManager).awsSecretManager).region }} - {{- end }} - - {{- if ((((.Values.global).secretsManager).awsSecretManager).tags) }} - - name: AWS_SECRET_MANAGER_SECRET_TAGS - value: {{ include "airbyte.tagsToString" .Values.global.secretsManager.awsSecretManager.tags }} - {{- end }} - {{- if ((((.Values.global).secretsManager).awsSecretManager).kms) }} - - name: AWS_KMS_KEY_ARN - value: {{ ((((.Values.global).secretsManager).awsSecretManager).kms) | default "" }} - {{- end }} - - # Values for Azure Key Vault - {{- if (((.Values.global).secretsManager).azureKeyVault) }} - - - name: AB_AZURE_KEY_VAULT_VAULT_URL - value: {{ (((.Values.global).secretsManager).azureKeyVault).vaultUrl }} - - - name: AB_AZURE_KEY_VAULT_TENANT_ID - value: {{ (((.Values.global).secretsManager).azureKeyVault).tenantId }} - - - name: AB_AZURE_KEY_VAULT_CLIENT_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.azureKeyVaultClientIdSecretKey" .Values.global.secretsManager.azureKeyVault.clientIdSecretKey }} - - name: AB_AZURE_KEY_VAULT_CLIENT_SECRET - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.azureKeyVaultClientSecretSecretKey" .Values.global.secretsManager.azureKeyVault.clientSecretSecretKey }} - - {{- end }} - {{- if ((((.Values.global).secretsManager).azureKeyVault).tags) }} - - name: AB_AZURE_KEY_VAULT_TAGS - value: {{ include "airbyte.tagsToString" .Values.global.secretsManager.azureKeyVault.tags }} - {{- end }} - - # Values for googleSecretManager secrets - {{- if (((.Values.global).secretsManager).googleSecretManager) }} - - name: SECRET_STORE_GCP_PROJECT_ID - value: {{ .Values.global.secretsManager.googleSecretManager.projectId }} - - name: SECRET_STORE_GCP_CREDENTIALS - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.googleSecretManagerCredentialsSecretKey" .Values.global.secretsManager.googleSecretManager.credentialsSecretKey }} - {{- end }} - - # Values for vault secrets - {{- if (((.Values.global).secretsManager).vault) }} - - name: VAULT_ADDRESS - value: {{ (((.Values.global).secretsManager).vault).address }} - - name: VAULT_PREFIX - value: {{ (((.Values.global).secretsManager).vault).prefix }} - - name: VAULT_AUTH_TOKEN - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include 
"airbyte.vaultAuthTokenSecretKey" .Values.global.secretsManager.vault.authTokenSecretKey }} - {{- end }} - - # Storage - {{- include "airbyte.storage.envs" . | nindent 8 }} - - # Database + {{- include "airbyte.common.envs" . | nindent 8 }} {{- include "airbyte.database.envs" . | nindent 8 }} - + {{- include "airbyte.database.migrations.envs" . | nindent 8 }} + {{- include "airbyte.jobs.envs" . | nindent 8 }} + {{- include "airbyte.logging.envs" . | nindent 8 }} + {{- include "airbyte.metrics.envs" . | nindent 8 }} + {{- include "airbyte.micronaut.envs" . | nindent 8 }} + {{- include "airbyte.secretsManager.envs" . | nindent 8 }} + {{- include "airbyte.storage.envs" . | nindent 8 }} + {{- include "airbyte.temporal.envs" . | nindent 8 }} + {{- include "airbyte.temporal.worker.envs" . | nindent 8 }} + {{- include "airbyte.tracking.envs" . | nindent 8 }} + {{- include "airbyte.worker.envs" . | nindent 8 }} + {{- include "airbyte.workloadApiServer.envs" . | nindent 8 }} + + {{- if (eq .Values.global.edition "enterprise") }} + {{- include "airbyte.enterprise.envs" . | nindent 8 }} {{- end }} # Values from secret diff --git a/charts/v2/airbyte/templates/airbyte-workload-api-server/deployment.yaml b/charts/v2/airbyte/templates/airbyte-workload-api-server/deployment.yaml index 3c24e4c7999..3f4656dcbac 100644 --- a/charts/v2/airbyte/templates/airbyte-workload-api-server/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-workload-api-server/deployment.yaml @@ -62,55 +62,15 @@ spec: - name: JAVA_TOOL_OPTIONS value: "-Xdebug -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:{{ .Values.workloadApiServer.debug.remoteDebugPort }},server=y,suspend=n" {{- end }} - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - - name: INTERNAL_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: INTERNAL_API_HOST - - name: AIRBYTE_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_API_HOST - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_PORT - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_HOST - - name: WORKLOAD_API_BEARER_TOKEN - valueFrom: - secretKeyRef: - name: {{ .Values.workloadApiServer.workloadApi.bearerTokenSecretName | default (printf "%s-airbyte-secrets" .Release.Name ) }} - key: {{ .Values.workloadApiServer.workloadApi.bearerTokenSecretKey | default "WORKLOAD_API_BEARER_TOKEN" }} - # Database + + {{- include "airbyte.common.envs" . | nindent 8 }} {{- include "airbyte.database.envs" . | nindent 8 }} + {{- include "airbyte.metrics.envs" . | nindent 8 }} + {{- include "airbyte.temporal.envs" . | nindent 8 }} + {{- include "airbyte.workloadApiServer.envs" . | nindent 8 }} + {{- if (eq .Values.global.edition "enterprise") }} + {{- include "airbyte.enterprise.envs" . 
| nindent 8 }} {{- end }} # Values from secret diff --git a/charts/v2/airbyte/templates/airbyte-workload-launcher/deployment.yaml b/charts/v2/airbyte/templates/airbyte-workload-launcher/deployment.yaml index a67c184672f..604402f9600 100644 --- a/charts/v2/airbyte/templates/airbyte-workload-launcher/deployment.yaml +++ b/charts/v2/airbyte/templates/airbyte-workload-launcher/deployment.yaml @@ -62,397 +62,33 @@ spec: - name: JAVA_TOOL_OPTIONS value: "-Xdebug -agentlib:jdwp=transport=dt_socket,address=0.0.0.0:{{ .Values.workloadLauncher.debug.remoteDebugPort }},server=y,suspend=n" {{- end}} - {{- if eq .Values.global.deploymentMode "oss" }} - - name: AIRBYTE_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: AIRBYTE_VERSION - name: DATA_PLANE_ID value: "local" - - name: PUB_SUB_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: PUB_SUB_ENABLED - - name: PUB_SUB_TOPIC_NAME - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: PUB_SUB_TOPIC_NAME - - name: CONFIG_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONFIG_ROOT - {{- if eq (lower (default "" .Values.global.storage.type)) "gcs" }} - - name: CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH - value: "/secrets/gcs-log-creds" - - name: CONTAINER_ORCHESTRATOR_SECRET_NAME - value: {{ include "airbyte.secretStoreName" .Values.global.storage.storageSecretName }} - {{- end }} - {{- include "airbyte.database.envs" . | nindent 8 }} - {{- include "airbyte.logging.envs" . | nindent 8 }} - - name: MICROMETER_METRICS_ENABLED - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_ENABLED - - name: MICROMETER_METRICS_STATSD_FLAVOR - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MICROMETER_METRICS_STATSD_FLAVOR - - name: SEGMENT_WRITE_KEY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SEGMENT_WRITE_KEY - - name: STATSD_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_HOST - - name: STATSD_PORT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: STATSD_PORT - - name: TRACKING_STRATEGY - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TRACKING_STRATEGY - - name: WORKSPACE_DOCKER_MOUNT - value: workspace - - name: WORKSPACE_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKSPACE_ROOT - - name: LOCAL_ROOT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: LOCAL_ROOT - - name: WEBAPP_URL - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WEBAPP_URL - - name: WORKLOAD_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKLOAD_API_HOST - - name: TEMPORAL_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_HOST - - name: TEMPORAL_WORKER_PORTS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: TEMPORAL_WORKER_PORTS - - name: JOB_KUBE_NAMESPACE - valueFrom: - fieldRef: - fieldPath: metadata.namespace - - name: JOB_KUBE_SERVICEACCOUNT - value: {{ .Values.global.serviceAccountName }} - {{- if $.Values.global.jobs.kube.annotations }} - - name: JOB_KUBE_ANNOTATIONS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_ANNOTATIONS - {{- end }} - {{- if $.Values.global.jobs.kube.labels }} - - name: JOB_KUBE_LABELS - 
valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_LABELS - {{- end }} - {{- if $.Values.global.jobs.kube.nodeSelector }} - - name: JOB_KUBE_NODE_SELECTORS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_NODE_SELECTORS - {{- end }} - {{- if $.Values.global.jobs.kube.tolerations }} - - name: JOB_KUBE_TOLERATIONS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_TOLERATIONS - {{- end }} - - name: CONNECTOR_SIDECAR_IMAGE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONNECTOR_SIDECAR_IMAGE - - name: WORKLOAD_INIT_IMAGE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKLOAD_INIT_IMAGE - - name: CONTAINER_ORCHESTRATOR_IMAGE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONTAINER_ORCHESTRATOR_IMAGE - - name: JOB_KUBE_BUSYBOX_IMAGE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_BUSYBOX_IMAGE - - name: JOB_KUBE_CURL_IMAGE - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_CURL_IMAGE - {{- if $.Values.global.jobs.kube.main_container_image_pull_secret }} - - name: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET - {{- end }} - - name: JOB_MAIN_CONTAINER_CPU_REQUEST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_REQUEST - - name: JOB_MAIN_CONTAINER_CPU_LIMIT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_CPU_LIMIT - - name: JOB_MAIN_CONTAINER_MEMORY_REQUEST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_REQUEST - - name: JOB_MAIN_CONTAINER_MEMORY_LIMIT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOB_MAIN_CONTAINER_MEMORY_LIMIT - - name: KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS - - name: WORKLOAD_API_BEARER_TOKEN - valueFrom: - secretKeyRef: - name: {{ index .Values.workloadLauncher "workloadApi" "bearerTokenSecretName" | default (printf "%s-airbyte-secrets" .Release.Name ) }} - key: {{ index .Values.workloadLauncher "workloadApi" "bearerTokenSecretKey" | default "WORKLOAD_API_BEARER_TOKEN" }} - - name: WORKLOAD_API_BEARER_TOKEN_SECRET_NAME - value: {{ index .Values.workloadLauncher "workloadApi" "bearerTokenSecretName" | default (printf "%s-airbyte-secrets" .Release.Name ) }} - - name: WORKLOAD_API_BEARER_TOKEN_SECRET_KEY - value: {{ index .Values.workloadLauncher "workloadApi" "bearerTokenSecretKey" | default "WORKLOAD_API_BEARER_TOKEN" }} - - name: INTERNAL_API_HOST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: INTERNAL_API_HOST - - name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION - - name: METRIC_CLIENT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: METRIC_CLIENT - - name: OTEL_COLLECTOR_ENDPOINT - valueFrom: - 
configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: OTEL_COLLECTOR_ENDPOINT - - name: ACTIVITY_MAX_ATTEMPT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: ACTIVITY_MAX_ATTEMPT - - name: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS - - name: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS - - name: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS - - name: SHOULD_RUN_NOTIFY_WORKFLOWS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: SHOULD_RUN_NOTIFY_WORKFLOWS - - name: MICRONAUT_ENVIRONMENTS - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: LAUNCHER_MICRONAUT_ENVIRONMENTS - - name: WORKLOAD_LAUNCHER_PARALLELISM - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: WORKLOAD_LAUNCHER_PARALLELISM - - name: FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT - - name: FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - - name: AIRBYTE_API_AUTH_HEADER_NAME - value: "X-Airbyte-Auth" - - name: AIRBYTE_API_AUTH_HEADER_VALUE - value: "Internal worker" - {{- else if and (eq .Values.global.deploymentMode "oss") .Values.global.auth.enabled }} - # Self-Managed Enterprise and Community w/ auth enabled use the same auth header, just - # splitting into two separate blocks for readability. - - name: AIRBYTE_API_AUTH_HEADER_NAME - value: "X-Airbyte-Auth" - name: AIRBYTE_API_AUTH_HEADER_VALUE value: "Internal worker" - {{- end }} - - # SECRETS MANAGER - - name: SECRET_PERSISTENCE - value: {{ include "airbyte.secretPersistence" . 
}} - # Values for AwsSecretsManager - {{- if eq ((((.Values.global).secretsManager).awsSecretManager).authenticationType) "credentials" }} - - name: AWS_SECRET_MANAGER_ACCESS_KEY_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.awsSecretManagerAccessKeyIdSecretKey" .Values.global.secretsManager.awsSecretManager.accessKeyIdSecretKey }} - - name: AWS_SECRET_MANAGER_SECRET_ACCESS_KEY - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.awsSecretManagerSecretAccessKeySecretKey" .Values.global.secretsManager.awsSecretManager.secretAccessKeySecretKey }} - - - name: AWS_SECRET_MANAGER_ACCESS_KEY_ID_REF_NAME - value: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - - name: AWS_SECRET_MANAGER_ACCESS_KEY_ID_REF_KEY - value: {{ include "airbyte.awsSecretManagerAccessKeyIdSecretKey" .Values.global.secretsManager.awsSecretManager.accessKeyIdSecretKey }} - - - name: AWS_SECRET_MANAGER_SECRET_ACCESS_KEY_REF_NAME - value: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - - name: AWS_SECRET_MANAGER_SECRET_ACCESS_KEY_REF_KEY - value: {{ include "airbyte.awsSecretManagerSecretAccessKeySecretKey" .Values.global.secretsManager.awsSecretManager.secretAccessKeySecretKey }} - - {{- end }} - {{- if ((((.Values.global).secretsManager).awsSecretManager).region) }} - - name: AWS_SECRET_MANAGER_REGION - value: {{ (((.Values.global).secretsManager).awsSecretManager).region }} - {{- end }} - - {{- if ((((.Values.global).secretsManager).awsSecretManager).tags) }} - - name: AWS_SECRET_MANAGER_SECRET_TAGS - value: {{ include "airbyte.tagsToString" .Values.global.secretsManager.awsSecretManager.tags }} - {{- end }} - {{- if ((((.Values.global).secretsManager).awsSecretManager).kms) }} - - name: AWS_KMS_KEY_ARN - value: {{ ((((.Values.global).secretsManager).awsSecretManager).kms) | default "" }} - {{- end }} - - # Values for Azure Key Vault - {{- if (((.Values.global).secretsManager).azureKeyVault) }} - - name: AB_AZURE_KEY_VAULT_VAULT_URL - value: {{ (((.Values.global).secretsManager).azureKeyVault).vaultUrl }} - - - name: AB_AZURE_KEY_VAULT_TENANT_ID - value: {{ (((.Values.global).secretsManager).azureKeyVault).tenantId }} - - - name: AB_AZURE_KEY_VAULT_CLIENT_ID - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.azureKeyVaultClientIdSecretKey" .Values.global.secretsManager.azureKeyVault.clientIdSecretKey }} - - name: AB_AZURE_KEY_VAULT_CLIENT_SECRET - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.azureKeyVaultClientSecretSecretKey" .Values.global.secretsManager.azureKeyVault.clientSecretSecretKey }} - - - name: AB_AZURE_KEY_VAULT_CLIENT_ID_REF_NAME - value: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - - name: AB_AZURE_KEY_VAULT_CLIENT_ID_REF_REF_KEY - value: {{ include "airbyte.azureKeyVaultClientIdSecretKey" .Values.global.secretsManager.azureKeyVault.clientIdSecretKey }} - - - name: AB_AZURE_KEY_VAULT_CLIENT_SECRET_REF_REF_NAME - value: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - - name: 
AB_AZURE_KEY_VAULT_CLIENT_SECRET_REF_REF_KEY - value: {{ include "airbyte.azureKeyVaultClientSecretSecretKey" .Values.global.secretsManager.azureKeyVault.clientSecretSecretKey }} - - {{- end }} - - {{- if ((((.Values.global).secretsManager).azureKeyVault).tags) }} - - name: AB_AZURE_KEY_VAULT_TAGS - value: {{ include "airbyte.tagsToString" .Values.global.secretsManager.azureKeyVault.tags }} - {{- end }} - - # Values for googleSecretManager secrets - {{- if (((.Values.global).secretsManager).googleSecretManager) }} - - name: SECRET_STORE_GCP_PROJECT_ID - value: {{ .Values.global.secretsManager.googleSecretManager.projectId }} - - name: SECRET_STORE_GCP_CREDENTIALS - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.googleSecretManagerCredentialsSecretKey" .Values.global.secretsManager.googleSecretManager.credentialsSecretKey }} - - name: SECRET_STORE_GCP_SECRET_NAME - value: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - - name: SECRET_STORE_GCP_SECRET_KEY - value: {{ include "airbyte.googleSecretManagerCredentialsSecretKey" .Values.global.secretsManager.googleSecretManager.credentialsSecretKey }} - {{- end }} - - # Values for vault secrets - {{- if (((.Values.global).secretsManager).vault) }} - - name: VAULT_ADDRESS - value: {{ (((.Values.global).secretsManager).vault).address }} - - name: VAULT_PREFIX - value: {{ (((.Values.global).secretsManager).vault).prefix }} - - name: VAULT_AUTH_TOKEN - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - key: {{ include "airbyte.vaultAuthTokenSecretKey" .Values.global.secretsManager.vault.authTokenSecretKey }} - - name: VAULT_AUTH_TOKEN_REF_NAME - value: {{ include "airbyte.secretStoreName" .Values.global.secretsManager.secretsManagerSecretName }} - - name: VAULT_AUTH_TOKEN_REF_KEY - value: {{ include "airbyte.vaultAuthTokenSecretKey" .Values.global.secretsManager.vault.authTokenSecretKey }} - {{- end }} - - # Storage + {{- include "airbyte.common.envs" . | nindent 8 }} + {{- include "airbyte.database.envs" . | nindent 8 }} + {{- include "airbyte.database.migrations.envs" . | nindent 8 }} + {{- include "airbyte.jobs.envs" . | nindent 8 }} + {{- include "airbyte.logging.envs" . | nindent 8 }} + {{- include "airbyte.metrics.envs" . | nindent 8 }} + {{- include "airbyte.micronaut.envs" . | nindent 8 }} {{- include "airbyte.storage.envs" . | nindent 8 }} - + {{- include "airbyte.secretsManager.envs" . | nindent 8 }} + {{- include "airbyte.temporal.envs" . | nindent 8 }} + {{- include "airbyte.temporal.worker.envs" . | nindent 8 }} + {{- include "airbyte.tracking.envs" . | nindent 8 }} + {{- include "airbyte.worker.envs" . | nindent 8 }} + {{- include "airbyte.workloadApiServer.envs" . | nindent 8 }} + {{- include "airbyte.workloads.envs" . | nindent 8 }} + {{- include "airbyte.workloadLauncher.envs" . | nindent 8 }} + {{- include "airbyte.workloadLauncher.images.envs" . | nindent 8 }} + {{- include "airbyte.workloads.resources.envs" . | nindent 8 }} + + {{- if (eq .Values.global.edition "enterprise") }} + {{- include "airbyte.enterprise.envs" . 
| nindent 8 }} {{- end }} # Values from secret diff --git a/charts/v2/airbyte/templates/config/_auth.tpl b/charts/v2/airbyte/templates/config/_auth.tpl new file mode 100644 index 00000000000..51a36e20a71 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_auth.tpl @@ -0,0 +1,664 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Auth Configuration +*/}} + +{{/* +Renders the auth secret name +*/}} +{{- define "airbyte.auth.secretName" }} +{{- if .Values.global.auth.secretName }} + {{- .Values.global.auth.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.password secret key +*/}} +{{- define "airbyte.auth.instanceAdmin.password.secretKey" }} + {{- .Values.global.auth.instanceAdmin.passwordSecretKey | default "INSTANCE_ADMIN_PASSWORD" }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.password environment variable +*/}} +{{- define "airbyte.auth.instanceAdmin.password.env" }} +- name: AB_INSTANCE_ADMIN_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.bootstrap.managedSecretName" . }} + key: {{ include "airbyte.auth.instanceAdmin.password.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all auth environment variables +*/}} +{{- define "airbyte.auth.envs" }} +{{- include "airbyte.auth.instanceAdmin.password.env" . }} +{{- end }} + +{{/* +Renders the auth.bootstrap secret name +*/}} +{{- define "airbyte.auth.bootstrap.secretName" }} +{{- if .Values.global.auth.secretName }} + {{- .Values.global.auth.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.auth.secretCreationEnabled value +*/}} +{{- define "airbyte.auth.bootstrap.secretCreationEnabled" }} + {{- .Values.global.auth.secretCreationEnabled | default true }} +{{- end }} + +{{/* +Renders the auth.bootstrap.secretCreationEnabled environment variable +*/}} +{{- define "airbyte.auth.bootstrap.secretCreationEnabled.env" }} +- name: AB_AUTH_SECRET_CREATION_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_AUTH_SECRET_CREATION_ENABLED +{{- end }} + +{{/* +Renders the global.auth.managedSecretName value +*/}} +{{- define "airbyte.auth.bootstrap.managedSecretName" }} + {{- .Values.global.auth.managedSecretName | default "airbyte-auth-secrets" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.managedSecretName environment variable +*/}} +{{- define "airbyte.auth.bootstrap.managedSecretName.env" }} +- name: AB_KUBERNETES_SECRET_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_KUBERNETES_SECRET_NAME +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.password value +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.password" }} + {{- .Values.global.auth.instanceAdmin.password }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.password secret key +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.password.secretKey" }} + {{- .Values.global.auth.instanceAdmin.passwordSecretKey | default "AB_INSTANCE_ADMIN_PASSWORD" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.password environment variable +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.password.env" }} +- name: AB_INSTANCE_ADMIN_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.bootstrap.secretName" . }} + key: {{ include "airbyte.auth.bootstrap.instanceAdmin.password.secretKey" . 
}} +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.passwordSecretKey value +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.passwordSecretKey" }} + {{- .Values.global.auth.instanceAdmin.passwordSecretKey | default "INSTANCE_ADMIN_PASSWORD" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.passwordSecretKey environment variable +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.passwordSecretKey.env" }} +- name: AB_INSTANCE_ADMIN_PASSWORD_SECRET_KEY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_INSTANCE_ADMIN_PASSWORD_SECRET_KEY +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.cilentId value +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.cilentId" }} + {{- .Values.global.auth.instanceAdmin.cilentId }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.cilentId secret key +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.cilentId.secretKey" }} + {{- .Values.global.auth.instanceAdmin.cilentIdSecretKey | default "AB_INSTANCE_ADMIN_CLIENT_ID" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.cilentId environment variable +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.cilentId.env" }} +- name: AB_INSTANCE_ADMIN_CLIENT_ID + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.bootstrap.secretName" . }} + key: {{ include "airbyte.auth.bootstrap.instanceAdmin.cilentId.secretKey" . }} +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.clientIdSecretKey value +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.clientIdSecretKey" }} + {{- .Values.global.auth.instanceAdmin.clientIdSecretKey | default "INSTANCE_ADMIN_CLIENT_ID" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.clientIdSecretKey environment variable +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.clientIdSecretKey.env" }} +- name: AB_INSTANCE_ADMIN_CLIENT_ID_SECRET_KEY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_INSTANCE_ADMIN_CLIENT_ID_SECRET_KEY +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.clientSecret value +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.clientSecret" }} + {{- .Values.global.auth.instanceAdmin.clientSecret }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.clientSecret secret key +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.clientSecret.secretKey" }} + {{- .Values.global.auth.instanceAdmin.clientSecretSecretKey | default "AB_INSTANCE_ADMIN_CLIENT_SECRET" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.clientSecret environment variable +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.clientSecret.env" }} +- name: AB_INSTANCE_ADMIN_CLIENT_SECRET + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.bootstrap.secretName" . }} + key: {{ include "airbyte.auth.bootstrap.instanceAdmin.clientSecret.secretKey" . 
}} +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.clientSecretSecretKey value +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.clientSecretSecretKey" }} + {{- .Values.global.auth.instanceAdmin.clientSecretSecretKey | default "INSTANCE_ADMIN_CLIENT_SECRET" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.instanceAdmin.clientSecretSecretKey environment variable +*/}} +{{- define "airbyte.auth.bootstrap.instanceAdmin.clientSecretSecretKey.env" }} +- name: AB_INSTANCE_ADMIN_CLIENT_SECRET_SECRET_KEY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_INSTANCE_ADMIN_CLIENT_SECRET_SECRET_KEY +{{- end }} + +{{/* +Renders the global.auth.security.jwtSignatureSecret value +*/}} +{{- define "airbyte.auth.bootstrap.security.jwtSignatureSecret" }} + {{- .Values.global.auth.security.jwtSignatureSecret }} +{{- end }} + +{{/* +Renders the auth.bootstrap.security.jwtSignatureSecret secret key +*/}} +{{- define "airbyte.auth.bootstrap.security.jwtSignatureSecret.secretKey" }} + {{- .Values.global.auth.security.jwtSignatureSecretSecretKey | default "AB_JWT_SIGNATURE_SECRET" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.security.jwtSignatureSecret environment variable +*/}} +{{- define "airbyte.auth.bootstrap.security.jwtSignatureSecret.env" }} +- name: AB_JWT_SIGNATURE_SECRET + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.bootstrap.secretName" . }} + key: {{ include "airbyte.auth.bootstrap.security.jwtSignatureSecret.secretKey" . }} +{{- end }} + +{{/* +Renders the global.auth.security.jwtSignatureSecretKey value +*/}} +{{- define "airbyte.auth.bootstrap.security.jwtSignatureSecretKey" }} + {{- .Values.global.auth.security.jwtSignatureSecretKey | default "JWT_SIGNATURE_SECRET" }} +{{- end }} + +{{/* +Renders the auth.bootstrap.security.jwtSignatureSecretKey environment variable +*/}} +{{- define "airbyte.auth.bootstrap.security.jwtSignatureSecretKey.env" }} +- name: AB_JWT_SIGNATURE_SECRET_KEY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_JWT_SIGNATURE_SECRET_KEY +{{- end }} + +{{/* +Renders the set of all auth.bootstrap environment variables +*/}} +{{- define "airbyte.auth.bootstrap.envs" }} +{{- include "airbyte.auth.bootstrap.secretCreationEnabled.env" . }} +{{- include "airbyte.auth.bootstrap.managedSecretName.env" . }} +{{- include "airbyte.auth.bootstrap.instanceAdmin.password.env" . }} +{{- include "airbyte.auth.bootstrap.instanceAdmin.passwordSecretKey.env" . }} +{{- include "airbyte.auth.bootstrap.instanceAdmin.cilentId.env" . }} +{{- include "airbyte.auth.bootstrap.instanceAdmin.clientIdSecretKey.env" . }} +{{- include "airbyte.auth.bootstrap.instanceAdmin.clientSecret.env" . }} +{{- include "airbyte.auth.bootstrap.instanceAdmin.clientSecretSecretKey.env" . }} +{{- include "airbyte.auth.bootstrap.security.jwtSignatureSecret.env" . }} +{{- include "airbyte.auth.bootstrap.security.jwtSignatureSecretKey.env" . }} +{{- end }} + +{{/* +Renders the set of all auth.bootstrap config map variables +*/}} +{{- define "airbyte.auth.bootstrap.configVars" }} +AB_AUTH_SECRET_CREATION_ENABLED: {{ include "airbyte.auth.bootstrap.secretCreationEnabled" . | quote }} +AB_KUBERNETES_SECRET_NAME: {{ include "airbyte.auth.bootstrap.managedSecretName" . | quote }} +AB_INSTANCE_ADMIN_PASSWORD_SECRET_KEY: {{ include "airbyte.auth.bootstrap.instanceAdmin.passwordSecretKey" . | quote }} +AB_INSTANCE_ADMIN_CLIENT_ID_SECRET_KEY: {{ include "airbyte.auth.bootstrap.instanceAdmin.clientIdSecretKey" . 
| quote }} +AB_INSTANCE_ADMIN_CLIENT_SECRET_SECRET_KEY: {{ include "airbyte.auth.bootstrap.instanceAdmin.clientSecretSecretKey" . | quote }} +AB_JWT_SIGNATURE_SECRET_KEY: {{ include "airbyte.auth.bootstrap.security.jwtSignatureSecretKey" . | quote }} +{{- end }} + +{{/* +Renders the set of all auth.bootstrap secret variables +*/}} +{{- define "airbyte.auth.bootstrap.secrets" }} +AB_INSTANCE_ADMIN_PASSWORD: {{ include "airbyte.auth.bootstrap.instanceAdmin.password" . | quote }} +AB_INSTANCE_ADMIN_CLIENT_ID: {{ include "airbyte.auth.bootstrap.instanceAdmin.cilentId" . | quote }} +AB_INSTANCE_ADMIN_CLIENT_SECRET: {{ include "airbyte.auth.bootstrap.instanceAdmin.clientSecret" . | quote }} +AB_JWT_SIGNATURE_SECRET: {{ include "airbyte.auth.bootstrap.security.jwtSignatureSecret" . | quote }} +{{- end }} + +{{/* +Renders the auth.identityProvider secret name +*/}} +{{- define "airbyte.auth.identityProvider.secretName" }} +{{- if .Values.global.auth.identityProvider.secretName }} + {{- .Values.global.auth.identityProvider.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.auth.identityProvider.type value +*/}} +{{- define "airbyte.auth.identityProvider.type" }} + {{- .Values.global.auth.identityProvider.type }} +{{- end }} + +{{/* +Renders the auth.identityProvider.type environment variable +*/}} +{{- define "airbyte.auth.identityProvider.type.env" }} +- name: IDENTITY_PROVIDER_TYPE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: IDENTITY_PROVIDER_TYPE +{{- end }} + +{{/* +Renders the global.auth.identityProvider.oidc.domain value +*/}} +{{- define "airbyte.auth.identityProvider.oidc.domain" }} + {{- .Values.global.auth.identityProvider.oidc.domain }} +{{- end }} + +{{/* +Renders the auth.identityProvider.oidc.domain environment variable +*/}} +{{- define "airbyte.auth.identityProvider.oidc.domain.env" }} +- name: OIDC_DOMAIN + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OIDC_DOMAIN +{{- end }} + +{{/* +Renders the global.auth.identityProvider.oidc.appName value +*/}} +{{- define "airbyte.auth.identityProvider.oidc.appName" }} + {{- .Values.global.auth.identityProvider.oidc.appName }} +{{- end }} + +{{/* +Renders the auth.identityProvider.oidc.appName environment variable +*/}} +{{- define "airbyte.auth.identityProvider.oidc.appName.env" }} +- name: OIDC_APP_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OIDC_APP_NAME +{{- end }} + +{{/* +Renders the global.auth.identityProvider.oidc.clientId value +*/}} +{{- define "airbyte.auth.identityProvider.oidc.clientId" }} + {{- .Values.global.auth.identityProvider.oidc.clientId }} +{{- end }} + +{{/* +Renders the auth.identityProvider.oidc.clientId secret key +*/}} +{{- define "airbyte.auth.identityProvider.oidc.clientId.secretKey" }} + {{- .Values.global.auth.identityProvider.oidc.clientIdSecretKey | default "OIDC_CLIENT_ID" }} +{{- end }} + +{{/* +Renders the auth.identityProvider.oidc.clientId environment variable +*/}} +{{- define "airbyte.auth.identityProvider.oidc.clientId.env" }} +- name: OIDC_CLIENT_ID + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.identityProvider.secretName" . }} + key: {{ include "airbyte.auth.identityProvider.oidc.clientId.secretKey" . 
}} +{{- end }} + +{{/* +Renders the global.auth.identityProvider.oidc.clientSecret value +*/}} +{{- define "airbyte.auth.identityProvider.oidc.clientSecret" }} + {{- .Values.global.auth.identityProvider.oidc.clientSecret }} +{{- end }} + +{{/* +Renders the auth.identityProvider.oidc.clientSecret secret key +*/}} +{{- define "airbyte.auth.identityProvider.oidc.clientSecret.secretKey" }} + {{- .Values.global.auth.identityProvider.oidc.clientSecretSecretKey | default "OIDC_CLIENT_SECRET" }} +{{- end }} + +{{/* +Renders the auth.identityProvider.oidc.clientSecret environment variable +*/}} +{{- define "airbyte.auth.identityProvider.oidc.clientSecret.env" }} +- name: OIDC_CLIENT_SECRET + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.identityProvider.secretName" . }} + key: {{ include "airbyte.auth.identityProvider.oidc.clientSecret.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all auth.identityProvider environment variables +*/}} +{{- define "airbyte.auth.identityProvider.envs" }} +{{- include "airbyte.auth.identityProvider.type.env" . }} +{{- include "airbyte.auth.identityProvider.oidc.domain.env" . }} +{{- include "airbyte.auth.identityProvider.oidc.appName.env" . }} +{{- include "airbyte.auth.identityProvider.oidc.clientId.env" . }} +{{- include "airbyte.auth.identityProvider.oidc.clientSecret.env" . }} +{{- end }} + +{{/* +Renders the set of all auth.identityProvider config map variables +*/}} +{{- define "airbyte.auth.identityProvider.configVars" }} +IDENTITY_PROVIDER_TYPE: {{ include "airbyte.auth.identityProvider.type" . | quote }} +OIDC_DOMAIN: {{ include "airbyte.auth.identityProvider.oidc.domain" . | quote }} +OIDC_APP_NAME: {{ include "airbyte.auth.identityProvider.oidc.appName" . | quote }} +{{- end }} + +{{/* +Renders the set of all auth.identityProvider secret variables +*/}} +{{- define "airbyte.auth.identityProvider.secrets" }} +OIDC_CLIENT_ID: {{ include "airbyte.auth.identityProvider.oidc.clientId" . | quote }} +OIDC_CLIENT_SECRET: {{ include "airbyte.auth.identityProvider.oidc.clientSecret" . 
| quote }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.enterprise secret name +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.secretName" }} +{{- if .Values.global.auth.instanceAdmin.secretName }} + {{- .Values.global.auth.instanceAdmin.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.firstName value +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.firstName" }} + {{- .Values.global.auth.instanceAdmin.firstName }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.enterprise.firstName environment variable +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.firstName.env" }} +- name: INITIAL_USER_FIRST_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: INITIAL_USER_FIRST_NAME +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.lastName value +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.lastName" }} + {{- .Values.global.auth.instanceAdmin.lastName }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.enterprise.lastName environment variable +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.lastName.env" }} +- name: INITIAL_USER_LAST_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: INITIAL_USER_LAST_NAME +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.email value +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.email" }} + {{- .Values.global.auth.instanceAdmin.email }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.enterprise.email secret key +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.email.secretKey" }} + {{- .Values.global.auth.instanceAdmin.emailSecretKey | default "INITIAL_USER_EMAIL" }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.enterprise.email environment variable +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.email.env" }} +- name: INITIAL_USER_EMAIL + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.instanceAdmin.enterprise.secretName" . }} + key: {{ include "airbyte.auth.instanceAdmin.enterprise.email.secretKey" . }} +{{- end }} + +{{/* +Renders the global.auth.instanceAdmin.password value +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.password" }} + {{- .Values.global.auth.instanceAdmin.password }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.enterprise.password secret key +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.password.secretKey" }} + {{- .Values.global.auth.instanceAdmin.passwordSecretKey | default "INITIAL_USER_PASSWORD" }} +{{- end }} + +{{/* +Renders the auth.instanceAdmin.enterprise.password environment variable +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.password.env" }} +- name: INITIAL_USER_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.instanceAdmin.enterprise.secretName" . }} + key: {{ include "airbyte.auth.instanceAdmin.enterprise.password.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all auth.instanceAdmin.enterprise environment variables +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.envs" }} +{{- include "airbyte.auth.instanceAdmin.enterprise.firstName.env" . }} +{{- include "airbyte.auth.instanceAdmin.enterprise.lastName.env" . }} +{{- include "airbyte.auth.instanceAdmin.enterprise.email.env" . }} +{{- include "airbyte.auth.instanceAdmin.enterprise.password.env" . 
}} +{{- end }} + +{{/* +Renders the set of all auth.instanceAdmin.enterprise config map variables +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.configVars" }} +INITIAL_USER_FIRST_NAME: {{ include "airbyte.auth.instanceAdmin.enterprise.firstName" . | quote }} +INITIAL_USER_LAST_NAME: {{ include "airbyte.auth.instanceAdmin.enterprise.lastName" . | quote }} +{{- end }} + +{{/* +Renders the set of all auth.instanceAdmin.enterprise secret variables +*/}} +{{- define "airbyte.auth.instanceAdmin.enterprise.secrets" }} +INITIAL_USER_EMAIL: {{ include "airbyte.auth.instanceAdmin.enterprise.email" . | quote }} +INITIAL_USER_PASSWORD: {{ include "airbyte.auth.instanceAdmin.enterprise.password" . | quote }} +{{- end }} + +{{/* +Renders the auth.jwt secret name +*/}} +{{- define "airbyte.auth.jwt.secretName" }} +{{- if .Values.global.auth.instanceAdmin.secretName }} + {{- .Values.global.auth.instanceAdmin.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the auth.jwt.jwtSignatureSecret secret key +*/}} +{{- define "airbyte.auth.jwt.jwtSignatureSecret.secretKey" }} + {{- .Values.global.auth.security.jwtSignatureSecretSecretKey | default "JWT_SIGNATURE_SECRET" }} +{{- end }} + +{{/* +Renders the auth.jwt.jwtSignatureSecret environment variable +*/}} +{{- define "airbyte.auth.jwt.jwtSignatureSecret.env" }} +- name: AB_JWT_SIGNATURE_SECRET + valueFrom: + secretKeyRef: + name: {{ include "airbyte.auth.bootstrap.managedSecretName" . }} + key: {{ include "airbyte.auth.jwt.jwtSignatureSecret.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all auth.jwt environment variables +*/}} +{{- define "airbyte.auth.jwt.envs" }} +{{- include "airbyte.auth.jwt.jwtSignatureSecret.env" . }} +{{- end }} + +{{/* +Renders the auth.security secret name +*/}} +{{- define "airbyte.auth.security.secretName" }} +{{- if .Values.global.auth.security.secretName }} + {{- .Values.global.auth.security.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.auth.security.cookieSecureSetting value +*/}} +{{- define "airbyte.auth.security.cookieSecureSetting" }} + {{- .Values.global.auth.security.cookieSecureSetting | default true }} +{{- end }} + +{{/* +Renders the auth.security.cookieSecureSetting environment variable +*/}} +{{- define "airbyte.auth.security.cookieSecureSetting.env" }} +- name: AB_COOKIE_SECURE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_COOKIE_SECURE +{{- end }} + +{{/* +Renders the global.auth.security.cookieSameSiteSetting value +*/}} +{{- define "airbyte.auth.security.cookieSameSiteSetting" }} + {{- .Values.global.auth.security.cookieSameSiteSetting | default "strict" }} +{{- end }} + +{{/* +Renders the auth.security.cookieSameSiteSetting environment variable +*/}} +{{- define "airbyte.auth.security.cookieSameSiteSetting.env" }} +- name: AB_COOKIE_SAME_SITE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_COOKIE_SAME_SITE +{{- end }} + +{{/* +Renders the set of all auth.security environment variables +*/}} +{{- define "airbyte.auth.security.envs" }} +{{- include "airbyte.auth.security.cookieSecureSetting.env" . }} +{{- include "airbyte.auth.security.cookieSameSiteSetting.env" . }} +{{- end }} + +{{/* +Renders the set of all auth.security config map variables +*/}} +{{- define "airbyte.auth.security.configVars" }} +AB_COOKIE_SECURE: {{ include "airbyte.auth.security.cookieSecureSetting" . 
| quote }} +AB_COOKIE_SAME_SITE: {{ include "airbyte.auth.security.cookieSameSiteSetting" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_common.tpl b/charts/v2/airbyte/templates/config/_common.tpl new file mode 100644 index 00000000000..32651685873 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_common.tpl @@ -0,0 +1,309 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Common Configuration +*/}} + +{{/* +Renders the common secret name +*/}} +{{- define "airbyte.common.secretName" }} +{{- if .Values.global.secretName }} + {{- .Values.global.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.edition value +*/}} +{{- define "airbyte.common.edition" }} + {{- ternary "pro" "community" (or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise")) }} +{{- end }} + +{{/* +Renders the common.edition environment variable +*/}} +{{- define "airbyte.common.edition.env" }} +- name: AIRBYTE_EDITION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_EDITION +{{- end }} + +{{/* +Renders the global.version value +*/}} +{{- define "airbyte.common.version" }} + {{- .Values.global.version | default .Chart.AppVersion }} +{{- end }} + +{{/* +Renders the common.version environment variable +*/}} +{{- define "airbyte.common.version.env" }} +- name: AIRBYTE_VERSION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_VERSION +{{- end }} + +{{/* +Renders the global.cluster.type value +*/}} +{{- define "airbyte.common.cluster.type" }} + {{- .Values.global.cluster.type }} +{{- end }} + +{{/* +Renders the common.cluster.type environment variable +*/}} +{{- define "airbyte.common.cluster.type.env" }} +- name: AIRBYTE_CLUSTER_TYPE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_CLUSTER_TYPE +{{- end }} + +{{/* +Renders the global.cluster.name value +*/}} +{{- define "airbyte.common.cluster.name" }} + {{- .Values.global.cluster.name }} +{{- end }} + +{{/* +Renders the common.cluster.name environment variable +*/}} +{{- define "airbyte.common.cluster.name.env" }} +- name: AIRBYTE_CLUSTER_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_CLUSTER_NAME +{{- end }} + +{{/* +Renders the global.deploymentMode value +*/}} +{{- define "airbyte.common.deploymentMode" }} + {{- upper .Values.global.deploymentMode }} +{{- end }} + +{{/* +Renders the common.deploymentMode environment variable +*/}} +{{- define "airbyte.common.deploymentMode.env" }} +- name: AIRBYTE_DEPLOYMENT_MODE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_DEPLOYMENT_MODE +{{- end }} + +{{/* +Renders the global.airbyteUrl value +*/}} +{{- define "airbyte.common.airbyteUrl" }} + {{- .Values.global.airbyteUrl }} +{{- end }} + +{{/* +Renders the common.airbyteUrl environment variable +*/}} +{{- define "airbyte.common.airbyteUrl.env" }} +- name: AIRBYTE_URL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_URL +{{- end }} + +{{/* +Renders the global.api.host value +*/}} +{{- define "airbyte.common.api.host" }} + {{- ternary (printf "http://localhost:%d/api/public" (int .Values.server.service.port)) (printf "%s/api/public" .Values.global.airbyteUrl) (eq .Values.global.edition "community") }} +{{- end }} + +{{/* +Renders the common.api.host environment variable +*/}} +{{- define 
"airbyte.common.api.host.env" }} +- name: AIRBYTE_API_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_API_HOST +{{- end }} + +{{/* +Renders the global.api.authHeaderName value +*/}} +{{- define "airbyte.common.api.authHeaderName" }} + {{- .Values.global.api.authHeaderName | default "X-Airbyte-Auth" }} +{{- end }} + +{{/* +Renders the common.api.authHeaderName environment variable +*/}} +{{- define "airbyte.common.api.authHeaderName.env" }} +- name: AIRBYTE_API_AUTH_HEADER_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_API_AUTH_HEADER_NAME +{{- end }} + +{{/* +Renders the global.server.host value +*/}} +{{- define "airbyte.common.server.host" }} + {{- (printf "%s-airbyte-server-svc:%d" .Release.Name (int .Values.server.service.port)) }} +{{- end }} + +{{/* +Renders the common.server.host environment variable +*/}} +{{- define "airbyte.common.server.host.env" }} +- name: AIRBYTE_SERVER_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AIRBYTE_SERVER_HOST +{{- end }} + +{{/* +Renders the global.api.authEnabled value +*/}} +{{- define "airbyte.common.api.authEnabled" }} + {{- .Values.global.api.authEnabled | default true }} +{{- end }} + +{{/* +Renders the common.api.authEnabled environment variable +*/}} +{{- define "airbyte.common.api.authEnabled.env" }} +- name: API_AUTHORIZATION_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: API_AUTHORIZATION_ENABLED +{{- end }} + +{{/* +Renders the global.connectorBuilderServer.apiHost value +*/}} +{{- define "airbyte.common.connectorBuilderServer.apiHost" }} + {{- (printf "http://%s-airbyte-connector-builder-server-svc:%d" .Release.Name (int .Values.connectorBuilderServer.service.port)) }} +{{- end }} + +{{/* +Renders the common.connectorBuilderServer.apiHost environment variable +*/}} +{{- define "airbyte.common.connectorBuilderServer.apiHost.env" }} +- name: CONNECTOR_BUILDER_SERVER_API_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONNECTOR_BUILDER_SERVER_API_HOST +{{- end }} + +{{/* +Renders the global.api.internalHost value +*/}} +{{- define "airbyte.common.api.internalHost" }} + {{- (printf "http://%s-airbyte-server-svc:%d" .Release.Name (int .Values.server.service.port)) }} +{{- end }} + +{{/* +Renders the common.api.internalHost environment variable +*/}} +{{- define "airbyte.common.api.internalHost.env" }} +- name: INTERNAL_API_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: INTERNAL_API_HOST +{{- end }} + +{{/* +Renders the global.local value +*/}} +{{- define "airbyte.common.local" }} + {{- .Values.global.local | default false }} +{{- end }} + +{{/* +Renders the common.local environment variable +*/}} +{{- define "airbyte.common.local.env" }} +- name: LOCAL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: LOCAL +{{- end }} + +{{/* +Renders the global.webapp.url value +*/}} +{{- define "airbyte.common.webapp.url" }} + {{- (printf "http://%s-airbyte-webapp-svc:%d" .Release.Name (int .Values.webapp.service.port)) }} +{{- end }} + +{{/* +Renders the common.webapp.url environment variable +*/}} +{{- define "airbyte.common.webapp.url.env" }} +- name: WEBAPP_URL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WEBAPP_URL +{{- end }} + +{{/* +Renders the set of all common environment variables +*/}} +{{- define "airbyte.common.envs" }} +{{- include 
"airbyte.common.edition.env" . }} +{{- include "airbyte.common.version.env" . }} +{{- include "airbyte.common.cluster.type.env" . }} +{{- include "airbyte.common.cluster.name.env" . }} +{{- include "airbyte.common.deploymentMode.env" . }} +{{- include "airbyte.common.airbyteUrl.env" . }} +{{- include "airbyte.common.api.host.env" . }} +{{- include "airbyte.common.api.authHeaderName.env" . }} +{{- include "airbyte.common.server.host.env" . }} +{{- include "airbyte.common.api.authEnabled.env" . }} +{{- include "airbyte.common.connectorBuilderServer.apiHost.env" . }} +{{- include "airbyte.common.api.internalHost.env" . }} +{{- include "airbyte.common.local.env" . }} +{{- include "airbyte.common.webapp.url.env" . }} +{{- end }} + +{{/* +Renders the set of all common config map variables +*/}} +{{- define "airbyte.common.configVars" }} +AIRBYTE_EDITION: {{ ternary "pro" "community" (or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise")) | quote }} +AIRBYTE_VERSION: {{ include "airbyte.common.version" . | quote }} +AIRBYTE_CLUSTER_TYPE: {{ include "airbyte.common.cluster.type" . | quote }} +AIRBYTE_CLUSTER_NAME: {{ include "airbyte.common.cluster.name" . | quote }} +AIRBYTE_DEPLOYMENT_MODE: {{ upper .Values.global.deploymentMode | quote }} +AIRBYTE_URL: {{ include "airbyte.common.airbyteUrl" . | quote }} +AIRBYTE_API_HOST: {{ ternary (printf "http://localhost:%d/api/public" (int .Values.server.service.port)) (printf "%s/api/public" .Values.global.airbyteUrl) (eq .Values.global.edition "community") | quote }} +AIRBYTE_API_AUTH_HEADER_NAME: {{ include "airbyte.common.api.authHeaderName" . | quote }} +AIRBYTE_SERVER_HOST: {{ (printf "%s-airbyte-server-svc:%d" .Release.Name (int .Values.server.service.port)) | quote }} +API_AUTHORIZATION_ENABLED: {{ include "airbyte.common.api.authEnabled" . | quote }} +CONNECTOR_BUILDER_SERVER_API_HOST: {{ (printf "http://%s-airbyte-connector-builder-server-svc:%d" .Release.Name (int .Values.connectorBuilderServer.service.port)) | quote }} +INTERNAL_API_HOST: {{ (printf "http://%s-airbyte-server-svc:%d" .Release.Name (int .Values.server.service.port)) | quote }} +LOCAL: {{ include "airbyte.common.local" . | quote }} +WEBAPP_URL: {{ (printf "http://%s-airbyte-webapp-svc:%d" .Release.Name (int .Values.webapp.service.port)) | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_connector.tpl b/charts/v2/airbyte/templates/config/_connector.tpl new file mode 100644 index 00000000000..34384dbbf8e --- /dev/null +++ b/charts/v2/airbyte/templates/config/_connector.tpl @@ -0,0 +1,69 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
*/}} + +{{/* + Connector Configuration +*/}} + +{{/* +Renders the connector secret name +*/}} +{{- define "airbyte.connector.secretName" }} +{{- if .Values.global.connectorRegistry.secretName }} + {{- .Values.global.connectorRegistry.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.connectorRegistry.seedProvider value +*/}} +{{- define "airbyte.connector.seedProvider" }} + {{- .Values.global.connectorRegistry.seedProvider }} +{{- end }} + +{{/* +Renders the connector.seedProvider environment variable +*/}} +{{- define "airbyte.connector.seedProvider.env" }} +- name: CONNECTORY_REGISTRY_SEED_PROVIDER + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONNECTORY_REGISTRY_SEED_PROVIDER +{{- end }} + +{{/* +Renders the global.connectorRegistry.enterpriseSourceStubsUrl value +*/}} +{{- define "airbyte.connector.enterpriseSourceStubsUrl" }} + {{- .Values.global.connectorRegistry.enterpriseSourceStubsUrl | default "https://connectors.airbyte.com/files/resources/connector_stubs/v0/connector_stubs.json" }} +{{- end }} + +{{/* +Renders the connector.enterpriseSourceStubsUrl environment variable +*/}} +{{- define "airbyte.connector.enterpriseSourceStubsUrl.env" }} +- name: ENTERPRISE_SOURCE_STUBS_URL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: ENTERPRISE_SOURCE_STUBS_URL +{{- end }} + +{{/* +Renders the set of all connector environment variables +*/}} +{{- define "airbyte.connector.envs" }} +{{- include "airbyte.connector.seedProvider.env" . }} +{{- include "airbyte.connector.enterpriseSourceStubsUrl.env" . }} +{{- end }} + +{{/* +Renders the set of all connector config map variables +*/}} +{{- define "airbyte.connector.configVars" }} +CONNECTORY_REGISTRY_SEED_PROVIDER: {{ include "airbyte.connector.seedProvider" . | quote }} +ENTERPRISE_SOURCE_STUBS_URL: {{ include "airbyte.connector.enterpriseSourceStubsUrl" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_cron.tpl b/charts/v2/airbyte/templates/config/_cron.tpl new file mode 100644 index 00000000000..545de572ff8 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_cron.tpl @@ -0,0 +1,38 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Cron Configuration +*/}} + +{{/* +Renders the cron.jobs.updateDefinitions.enabled value +*/}} +{{- define "airbyte.cron.jobs.updateDefinitions.enabled" }} + {{- .Values.cron.jobs.updateDefinitions.enabled }} +{{- end }} + +{{/* +Renders the cron.jobs.updateDefinitions.enabled environment variable +*/}} +{{- define "airbyte.cron.jobs.updateDefinitions.enabled.env" }} +- name: UPDATE_DEFINITIONS_CRON_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: UPDATE_DEFINITIONS_CRON_ENABLED +{{- end }} + +{{/* +Renders the set of all cron environment variables +*/}} +{{- define "airbyte.cron.envs" }} +{{- include "airbyte.cron.jobs.updateDefinitions.enabled.env" . }} +{{- end }} + +{{/* +Renders the set of all cron config map variables +*/}} +{{- define "airbyte.cron.configVars" }} +UPDATE_DEFINITIONS_CRON_ENABLED: {{ include "airbyte.cron.jobs.updateDefinitions.enabled" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_customerio.tpl b/charts/v2/airbyte/templates/config/_customerio.tpl new file mode 100644 index 00000000000..29dcd65320b --- /dev/null +++ b/charts/v2/airbyte/templates/config/_customerio.tpl @@ -0,0 +1,56 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
*/}} + +{{/* + Customerio Configuration +*/}} + +{{/* +Renders the customerio secret name +*/}} +{{- define "airbyte.customerio.secretName" }} +{{- if .Values.global.customerio.secretName }} + {{- .Values.global.customerio.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.customerio.apiKey value +*/}} +{{- define "airbyte.customerio.apiKey" }} + {{- .Values.global.customerio.apiKey }} +{{- end }} + +{{/* +Renders the customerio.apiKey secret key +*/}} +{{- define "airbyte.customerio.apiKey.secretKey" }} + {{- .Values.global.customerio.apiKeySecretKey | default "CUSTOMERIO_API_KEY" }} +{{- end }} + +{{/* +Renders the customerio.apiKey environment variable +*/}} +{{- define "airbyte.customerio.apiKey.env" }} +- name: CUSTOMERIO_API_KEY + valueFrom: + secretKeyRef: + name: {{ include "airbyte.customerio.secretName" . }} + key: {{ include "airbyte.customerio.apiKey.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all customerio environment variables +*/}} +{{- define "airbyte.customerio.envs" }} +{{- include "airbyte.customerio.apiKey.env" . }} +{{- end }} + +{{/* +Renders the set of all customerio secret variables +*/}} +{{- define "airbyte.customerio.secrets" }} +CUSTOMERIO_API_KEY: {{ include "airbyte.customerio.apiKey" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_database.tpl b/charts/v2/airbyte/templates/config/_database.tpl new file mode 100644 index 00000000000..83adea562d6 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_database.tpl @@ -0,0 +1,252 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Database Configuration +*/}} + +{{/* +Renders the database secret name +*/}} +{{- define "airbyte.database.secretName" }} +{{- if .Values.global.database.secretName }} + {{- .Values.global.database.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.database.host value +*/}} +{{- define "airbyte.database.host" }} + {{- .Values.global.database.host | default (printf "airbyte-db-svc.%s.svc.cluster.local" .Release.Namespace) }} +{{- end }} + +{{/* +Renders the database.host environment variable +*/}} +{{- define "airbyte.database.host.env" }} +- name: DATABASE_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATABASE_HOST +{{- end }} + +{{/* +Renders the global.database.port value +*/}} +{{- define "airbyte.database.port" }} + {{- .Values.global.database.port | default 5432 }} +{{- end }} + +{{/* +Renders the database.port environment variable +*/}} +{{- define "airbyte.database.port.env" }} +- name: DATABASE_PORT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATABASE_PORT +{{- end }} + +{{/* +Renders the global.database.url value +*/}} +{{- define "airbyte.database.url" }} + {{- (printf "jdbc:postgresql://%s:%d/%s" (include "airbyte.database.host" .) 
(int (include "airbyte.database.port" .)) (include "airbyte.database.name" .)) }} +{{- end }} + +{{/* +Renders the database.url environment variable +*/}} +{{- define "airbyte.database.url.env" }} +- name: DATABASE_URL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATABASE_URL +{{- end }} + +{{/* +Renders the global.database.user value +*/}} +{{- define "airbyte.database.user" }} + {{- .Values.global.database.user | default "airbyte" }} +{{- end }} + +{{/* +Renders the database.user secret key +*/}} +{{- define "airbyte.database.user.secretKey" }} + {{- .Values.global.database.userSecretKey | default "DATABASE_USER" }} +{{- end }} + +{{/* +Renders the database.user environment variable +*/}} +{{- define "airbyte.database.user.env" }} +- name: DATABASE_USER + valueFrom: + secretKeyRef: + name: {{ include "airbyte.database.secretName" . }} + key: {{ include "airbyte.database.user.secretKey" . }} +{{- end }} + +{{/* +Renders the global.database.password value +*/}} +{{- define "airbyte.database.password" }} + {{- .Values.global.database.password | default "airbyte" }} +{{- end }} + +{{/* +Renders the database.password secret key +*/}} +{{- define "airbyte.database.password.secretKey" }} + {{- .Values.global.database.passwordSecretKey | default "DATABASE_PASSWORD" }} +{{- end }} + +{{/* +Renders the database.password environment variable +*/}} +{{- define "airbyte.database.password.env" }} +- name: DATABASE_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "airbyte.database.secretName" . }} + key: {{ include "airbyte.database.password.secretKey" . }} +{{- end }} + +{{/* +Renders the global.database.name value +*/}} +{{- define "airbyte.database.name" }} + {{- .Values.global.database.name | default "db-airbyte" }} +{{- end }} + +{{/* +Renders the database.name environment variable +*/}} +{{- define "airbyte.database.name.env" }} +- name: DATABASE_DB + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATABASE_DB +{{- end }} + +{{/* +Renders the set of all database environment variables +*/}} +{{- define "airbyte.database.envs" }} +{{- include "airbyte.database.host.env" . }} +{{- include "airbyte.database.port.env" . }} +{{- include "airbyte.database.url.env" . }} +{{- include "airbyte.database.user.env" . }} +{{- include "airbyte.database.password.env" . }} +{{- include "airbyte.database.name.env" . }} +{{- end }} + +{{/* +Renders the set of all database config map variables +*/}} +{{- define "airbyte.database.configVars" }} +DATABASE_HOST: {{ include "airbyte.database.host" . | quote }} +DATABASE_PORT: {{ include "airbyte.database.port" . | quote }} +DATABASE_URL: {{ (printf "jdbc:postgresql://%s:%d/%s" (include "airbyte.database.host" .) (int (include "airbyte.database.port" .)) (include "airbyte.database.name" .)) | quote }} +DATABASE_DB: {{ include "airbyte.database.name" . | quote }} +{{- end }} + +{{/* +Renders the set of all database secret variables +*/}} +{{- define "airbyte.database.secrets" }} +DATABASE_USER: {{ include "airbyte.database.user" . | quote }} +DATABASE_PASSWORD: {{ include "airbyte.database.password" . 
| quote }} +{{- end }} + +{{/* +Renders the database.migrations secret name +*/}} +{{- define "airbyte.database.migrations.secretName" }} +{{- if .Values.global.migrations.secretName }} + {{- .Values.global.migrations.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.migrations.runAtStartup value +*/}} +{{- define "airbyte.database.migrations.runAtStartup" }} + {{- .Values.global.migrations.runAtStartup | default true }} +{{- end }} + +{{/* +Renders the database.migrations.runAtStartup environment variable +*/}} +{{- define "airbyte.database.migrations.runAtStartup.env" }} +- name: RUN_DATABASE_MIGRATION_ON_STARTUP + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: RUN_DATABASE_MIGRATION_ON_STARTUP +{{- end }} + +{{/* +Renders the global.migrations.configDb.minimumFlywayMigrationVersion value +*/}} +{{- define "airbyte.database.migrations.configDb.minimumFlywayMigrationVersion" }} + {{- .Values.global.migrations.configDb.minimumFlywayMigrationVersion | default "0.35.15.001" }} +{{- end }} + +{{/* +Renders the database.migrations.configDb.minimumFlywayMigrationVersion environment variable +*/}} +{{- define "airbyte.database.migrations.configDb.minimumFlywayMigrationVersion.env" }} +- name: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION +{{- end }} + +{{/* +Renders the global.migrations.jobsDb.minimumFlywayMigrationVersion value +*/}} +{{- define "airbyte.database.migrations.jobsDb.minimumFlywayMigrationVersion" }} + {{- .Values.global.migrations.jobsDb.minimumFlywayMigrationVersion | default "0.29.15.001" }} +{{- end }} + +{{/* +Renders the database.migrations.jobsDb.minimumFlywayMigrationVersion environment variable +*/}} +{{- define "airbyte.database.migrations.jobsDb.minimumFlywayMigrationVersion.env" }} +- name: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION +{{- end }} + +{{/* +Renders the set of all database.migrations environment variables +*/}} +{{- define "airbyte.database.migrations.envs" }} +{{- include "airbyte.database.migrations.runAtStartup.env" . }} +{{- include "airbyte.database.migrations.configDb.minimumFlywayMigrationVersion.env" . }} +{{- include "airbyte.database.migrations.jobsDb.minimumFlywayMigrationVersion.env" . }} +{{- end }} + +{{/* +Renders the set of all database.migrations config map variables +*/}} +{{- define "airbyte.database.migrations.configVars" }} +RUN_DATABASE_MIGRATION_ON_STARTUP: {{ include "airbyte.database.migrations.runAtStartup" . | quote }} +CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION: {{ include "airbyte.database.migrations.configDb.minimumFlywayMigrationVersion" . | quote }} +JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION: {{ include "airbyte.database.migrations.jobsDb.minimumFlywayMigrationVersion" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_datadog.tpl b/charts/v2/airbyte/templates/config/_datadog.tpl new file mode 100644 index 00000000000..6aa6dcc9187 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_datadog.tpl @@ -0,0 +1,369 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
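To compare against the inline env block removed from the workload-launcher deployment, each generated *.envs helper expands to an ordinary list of env entries. For example, with default values and a release named "airbyte", airbyte.database.envs should render roughly as follows:

# Approximate rendering of {{ include "airbyte.database.envs" . }} under default
# values and release name "airbyte"; DATABASE_PORT, DATABASE_URL and DATABASE_DB
# follow the same configMapKeyRef pattern as DATABASE_HOST.
- name: DATABASE_HOST
  valueFrom:
    configMapKeyRef:
      name: airbyte-airbyte-env
      key: DATABASE_HOST
- name: DATABASE_USER
  valueFrom:
    secretKeyRef:
      name: airbyte-airbyte-secrets
      key: DATABASE_USER
- name: DATABASE_PASSWORD
  valueFrom:
    secretKeyRef:
      name: airbyte-airbyte-secrets
      key: DATABASE_PASSWORD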
diff --git a/charts/v2/airbyte/templates/config/_datadog.tpl b/charts/v2/airbyte/templates/config/_datadog.tpl new file mode 100644 index 00000000000..6aa6dcc9187 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_datadog.tpl @@ -0,0 +1,369 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Datadog Configuration +*/}} + +{{/* +Renders the datadog secret name +*/}} +{{- define "airbyte.datadog.secretName" }} +{{- if .Values.global.datadog.secretName }} + {{- .Values.global.datadog.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.datadog.agentHost value +*/}} +{{- define "airbyte.datadog.agentHost" }} + {{- .Values.global.datadog.agentHost }} +{{- end }} + +{{/* +Renders the datadog.agentHost environment variable +*/}} +{{- define "airbyte.datadog.agentHost.env" }} +- name: DD_AGENT_HOST + valueFrom: + fieldRef: + fieldPath: status.hostIP + +{{- end }} + +{{/* +Renders the global.datadog.enabled value +*/}} +{{- define "airbyte.datadog.enabled" }} + {{- .Values.global.datadog.enabled | default false }} +{{- end }} + +{{/* +Renders the datadog.enabled environment variable +*/}} +{{- define "airbyte.datadog.enabled.env" }} +- name: DD_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_ENABLED +{{- end }} + +{{/* +Renders the global.datadog.env value +*/}} +{{- define "airbyte.datadog.env" }} + {{- .Values.global.datadog.env }} +{{- end }} + +{{/* +Renders the datadog.env environment variable +*/}} +{{- define "airbyte.datadog.env.env" }} +- name: DD_ENV + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_ENV +{{- end }} + +{{/* +Renders the global.datadog.service value +*/}} +{{- define "airbyte.datadog.service" }} + {{- (printf "airbyte-%s" .Chart.Name) }} +{{- end }} + +{{/* +Renders the datadog.service environment variable +*/}} +{{- define "airbyte.datadog.service.env" }} +- name: DD_SERVICE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_SERVICE +{{- end }} + +{{/* +Renders the global.datadog.version value +*/}} +{{- define "airbyte.datadog.version" }} + {{- .Values.global.image.tag }} +{{- end }} + +{{/* +Renders the datadog.version environment variable +*/}} +{{- define "airbyte.datadog.version.env" }} +- name: DD_VERSION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_VERSION +{{- end }} + +{{/* +Renders the global.datadog.statsd.port value +*/}} +{{- define "airbyte.datadog.statsd.port" }} + {{- .Values.global.datadog.statsd.port }} +{{- end }} + +{{/* +Renders the datadog.statsd.port environment variable +*/}} +{{- define "airbyte.datadog.statsd.port.env" }} +- name: DD_DOGSTATSD_PORT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_DOGSTATSD_PORT +{{- end }} + +{{/* +Renders the global.datadog.traceAgentPort value +*/}} +{{- define "airbyte.datadog.traceAgentPort" }} + {{- .Values.global.datadog.traceAgentPort }} +{{- end }} + +{{/* +Renders the datadog.traceAgentPort environment variable +*/}} +{{- define "airbyte.datadog.traceAgentPort.env" }} +- name: DD_TRACE_AGENT_PORT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_TRACE_AGENT_PORT +{{- end }} + +{{/* +Renders the global.datadog.integrations.dbm.enabled value +*/}} +{{- define "airbyte.datadog.integrations.dbm.enabled" }} + {{- .Values.global.datadog.integrations.dbm.enabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.dbm.enabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.dbm.enabled.env" }} +- name: DD_INTEGRATION_DBM_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_DBM_ENABLED +{{- end }} + +{{/* +Renders the 
global.datadog.integrations.dbm.propagationMode value +*/}} +{{- define "airbyte.datadog.integrations.dbm.propagationMode" }} + {{- .Values.global.datadog.integrations.dbm.propagationMode | default "full" }} +{{- end }} + +{{/* +Renders the datadog.integrations.dbm.propagationMode environment variable +*/}} +{{- define "airbyte.datadog.integrations.dbm.propagationMode.env" }} +- name: DD_INTEGRATION_DBM_PROPAGATION_MODE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_DBM_PROPAGATION_MODE +{{- end }} + +{{/* +Renders the global.datadog.integrations.googleHttpClient.enabled value +*/}} +{{- define "airbyte.datadog.integrations.googleHttpClient.enabled" }} + {{- .Values.global.datadog.integrations.googleHttpClient.enabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.googleHttpClient.enabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.googleHttpClient.enabled.env" }} +- name: DD_INTEGRATION_GOOGLE_HTTP_CLIENT_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_GOOGLE_HTTP_CLIENT_ENABLED +{{- end }} + +{{/* +Renders the global.datadog.integrations.grpc.enabled value +*/}} +{{- define "airbyte.datadog.integrations.grpc.enabled" }} + {{- .Values.global.datadog.integrations.grpc.enabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.grpc.enabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.grpc.enabled.env" }} +- name: DD_INTEGRATION_GRPC_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_GRPC_ENABLED +{{- end }} + +{{/* +Renders the global.datadog.integrations.grpc.clientEnabled value +*/}} +{{- define "airbyte.datadog.integrations.grpc.clientEnabled" }} + {{- .Values.global.datadog.integrations.grpc.clientEnabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.grpc.clientEnabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.grpc.clientEnabled.env" }} +- name: DD_INTEGRATION_GRPC_CLIENT_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_GRPC_CLIENT_ENABLED +{{- end }} + +{{/* +Renders the global.datadog.integrations.grpc.serverEnabled value +*/}} +{{- define "airbyte.datadog.integrations.grpc.serverEnabled" }} + {{- .Values.global.datadog.integrations.grpc.serverEnabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.grpc.serverEnabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.grpc.serverEnabled.env" }} +- name: DD_INTEGRATION_GRPC_SERVER_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_GRPC_SERVER_ENABLED +{{- end }} + +{{/* +Renders the global.datadog.integrations.httpUrlConnection.enabled value +*/}} +{{- define "airbyte.datadog.integrations.httpUrlConnection.enabled" }} + {{- .Values.global.datadog.integrations.httpUrlConnection.enabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.httpUrlConnection.enabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.httpUrlConnection.enabled.env" }} +- name: DD_INTEGRATION_HTTPURLCONNECTION_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_HTTPURLCONNECTION_ENABLED +{{- end }} + +{{/* +Renders the global.datadog.integrations.netty.enabled value +*/}} +{{- define "airbyte.datadog.integrations.netty.enabled" }} + 
{{- .Values.global.datadog.integrations.netty.enabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.netty.enabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.netty.enabled.env" }} +- name: DD_INTEGRATION_NETTY_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_NETTY_ENABLED +{{- end }} + +{{/* +Renders the global.datadog.integrations.netty41.enabled value +*/}} +{{- define "airbyte.datadog.integrations.netty41.enabled" }} + {{- .Values.global.datadog.integrations.netty41.enabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.netty41.enabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.netty41.enabled.env" }} +- name: DD_INTEGRATION_NETTY_4_1_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_NETTY_4_1_ENABLED +{{- end }} + +{{/* +Renders the global.datadog.integrations.urlConnection.enabled value +*/}} +{{- define "airbyte.datadog.integrations.urlConnection.enabled" }} + {{- .Values.global.datadog.integrations.urlConnection.enabled | default false }} +{{- end }} + +{{/* +Renders the datadog.integrations.urlConnection.enabled environment variable +*/}} +{{- define "airbyte.datadog.integrations.urlConnection.enabled.env" }} +- name: DD_INTEGRATION_URLCONNECTION_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DD_INTEGRATION_URLCONNECTION_ENABLED +{{- end }} + +{{/* +Renders the set of all datadog environment variables +*/}} +{{- define "airbyte.datadog.envs" }} +{{- include "airbyte.datadog.agentHost.env" . }} +{{- include "airbyte.datadog.enabled.env" . }} +{{- include "airbyte.datadog.env.env" . }} +{{- include "airbyte.datadog.service.env" . }} +{{- include "airbyte.datadog.version.env" . }} +{{- include "airbyte.datadog.statsd.port.env" . }} +{{- include "airbyte.datadog.traceAgentPort.env" . }} +{{- include "airbyte.datadog.integrations.dbm.enabled.env" . }} +{{- include "airbyte.datadog.integrations.dbm.propagationMode.env" . }} +{{- include "airbyte.datadog.integrations.googleHttpClient.enabled.env" . }} +{{- include "airbyte.datadog.integrations.grpc.enabled.env" . }} +{{- include "airbyte.datadog.integrations.grpc.clientEnabled.env" . }} +{{- include "airbyte.datadog.integrations.grpc.serverEnabled.env" . }} +{{- include "airbyte.datadog.integrations.httpUrlConnection.enabled.env" . }} +{{- include "airbyte.datadog.integrations.netty.enabled.env" . }} +{{- include "airbyte.datadog.integrations.netty41.enabled.env" . }} +{{- include "airbyte.datadog.integrations.urlConnection.enabled.env" . }} +{{- end }} + +{{/* +Renders the set of all datadog config map variables +*/}} +{{- define "airbyte.datadog.configVars" }} +DD_AGENT_HOST: {{ include "airbyte.datadog.agentHost" . | quote }} +DD_ENABLED: {{ include "airbyte.datadog.enabled" . | quote }} +DD_ENV: {{ include "airbyte.datadog.env" . | quote }} +DD_SERVICE: {{ (printf "airbyte-%s" .Chart.Name) | quote }} +DD_VERSION: {{ .Values.global.image.tag | quote }} +DD_DOGSTATSD_PORT: {{ include "airbyte.datadog.statsd.port" . | quote }} +DD_TRACE_AGENT_PORT: {{ include "airbyte.datadog.traceAgentPort" . | quote }} +DD_INTEGRATION_DBM_ENABLED: {{ include "airbyte.datadog.integrations.dbm.enabled" . | quote }} +DD_INTEGRATION_DBM_PROPAGATION_MODE: {{ include "airbyte.datadog.integrations.dbm.propagationMode" . 
| quote }} +DD_INTEGRATION_GOOGLE_HTTP_CLIENT_ENABLED: {{ include "airbyte.datadog.integrations.googleHttpClient.enabled" . | quote }} +DD_INTEGRATION_GRPC_ENABLED: {{ include "airbyte.datadog.integrations.grpc.enabled" . | quote }} +DD_INTEGRATION_GRPC_CLIENT_ENABLED: {{ include "airbyte.datadog.integrations.grpc.clientEnabled" . | quote }} +DD_INTEGRATION_GRPC_SERVER_ENABLED: {{ include "airbyte.datadog.integrations.grpc.serverEnabled" . | quote }} +DD_INTEGRATION_HTTPURLCONNECTION_ENABLED: {{ include "airbyte.datadog.integrations.httpUrlConnection.enabled" . | quote }} +DD_INTEGRATION_NETTY_ENABLED: {{ include "airbyte.datadog.integrations.netty.enabled" . | quote }} +DD_INTEGRATION_NETTY_4_1_ENABLED: {{ include "airbyte.datadog.integrations.netty41.enabled" . | quote }} +DD_INTEGRATION_URLCONNECTION_ENABLED: {{ include "airbyte.datadog.integrations.urlConnection.enabled" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_enterprise.tpl b/charts/v2/airbyte/templates/config/_enterprise.tpl new file mode 100644 index 00000000000..543be72773f --- /dev/null +++ b/charts/v2/airbyte/templates/config/_enterprise.tpl @@ -0,0 +1,56 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Enterprise Configuration +*/}} + +{{/* +Renders the enterprise secret name +*/}} +{{- define "airbyte.enterprise.secretName" }} +{{- if .Values.global.enterprise.secretName }} + {{- .Values.global.enterprise.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.enterprise.licenseKey value +*/}} +{{- define "airbyte.enterprise.licenseKey" }} + {{- .Values.global.enterprise.licenseKey }} +{{- end }} + +{{/* +Renders the enterprise.licenseKey secret key +*/}} +{{- define "airbyte.enterprise.licenseKey.secretKey" }} + {{- .Values.global.enterprise.licenseKeySecretKey | default "AIRBYTE_LICENSE_KEY" }} +{{- end }} + +{{/* +Renders the enterprise.licenseKey environment variable +*/}} +{{- define "airbyte.enterprise.licenseKey.env" }} +- name: AIRBYTE_LICENSE_KEY + valueFrom: + secretKeyRef: + name: {{ include "airbyte.enterprise.secretName" . }} + key: {{ include "airbyte.enterprise.licenseKey.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all enterprise environment variables +*/}} +{{- define "airbyte.enterprise.envs" }} +{{- include "airbyte.enterprise.licenseKey.env" . }} +{{- end }} + +{{/* +Renders the set of all enterprise secret variables +*/}} +{{- define "airbyte.enterprise.secrets" }} +AIRBYTE_LICENSE_KEY: {{ include "airbyte.enterprise.licenseKey" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_featureFlags.tpl b/charts/v2/airbyte/templates/config/_featureFlags.tpl new file mode 100644 index 00000000000..8c3fea3af1a --- /dev/null +++ b/charts/v2/airbyte/templates/config/_featureFlags.tpl @@ -0,0 +1,126 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
*/}} + +{{/* + Featureflags Configuration +*/}} + +{{/* +Renders the featureFlags secret name +*/}} +{{- define "airbyte.featureFlags.secretName" }} +{{- if .Values.global.featureFlags.secretName }} + {{- .Values.global.featureFlags.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.featureFlags.client value +*/}} +{{- define "airbyte.featureFlags.client" }} + {{- .Values.global.featureFlags.client | default "configfile" }} +{{- end }} + +{{/* +Renders the featureFlags.client environment variable +*/}} +{{- define "airbyte.featureFlags.client.env" }} +- name: FEATURE_FLAG_CLIENT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: FEATURE_FLAG_CLIENT +{{- end }} + +{{/* +Renders the global.featureFlags.configfile.path value +*/}} +{{- define "airbyte.featureFlags.configfile.path" }} + {{- .Values.global.featureFlags.configfile.path | default "/etc/launchdarkly/flags.yml" }} +{{- end }} + +{{/* +Renders the featureFlags.configfile.path environment variable +*/}} +{{- define "airbyte.featureFlags.configfile.path.env" }} +- name: FEATURE_FLAG_PATH + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: FEATURE_FLAG_PATH +{{- end }} + +{{/* +Renders the global.featureFlags.launchdarkly.key value +*/}} +{{- define "airbyte.featureFlags.launchdarkly.key" }} + {{- .Values.global.featureFlags.launchdarkly.key }} +{{- end }} + +{{/* +Renders the featureFlags.launchdarkly.key secret key +*/}} +{{- define "airbyte.featureFlags.launchdarkly.key.secretKey" }} + {{- .Values.global.featureFlags.launchdarkly.keySecretKey | default "LAUNCHDARKLY_KEY" }} +{{- end }} + +{{/* +Renders the featureFlags.launchdarkly.key environment variable +*/}} +{{- define "airbyte.featureFlags.launchdarkly.key.env" }} +- name: LAUNCHDARKLY_KEY + valueFrom: + secretKeyRef: + name: {{ include "airbyte.featureFlags.secretName" . }} + key: {{ include "airbyte.featureFlags.launchdarkly.key.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all featureFlags environment variables +*/}} +{{- define "airbyte.featureFlags.envs" }} +{{- include "airbyte.featureFlags.client.env" . }} +{{- $opt := (include "airbyte.featureFlags.client" .) }} + +{{- if eq $opt "configfile" }} +{{- include "airbyte.featureFlags.configfile.path.env" . }} +{{- end }} + +{{- if eq $opt "launchdarkly" }} +{{- include "airbyte.featureFlags.launchdarkly.key.env" . }} +{{- end }} + +{{- end }} + +{{/* +Renders the set of all featureFlags config map variables +*/}} +{{- define "airbyte.featureFlags.configVars" }} +FEATURE_FLAG_CLIENT: {{ include "airbyte.featureFlags.client" . | quote }} +{{- $opt := (include "airbyte.featureFlags.client" .) }} + +{{- if eq $opt "configfile" }} +FEATURE_FLAG_PATH: {{ include "airbyte.featureFlags.configfile.path" . | quote }} +{{- end }} + +{{- if eq $opt "launchdarkly" }} +{{- end }} + +{{- end }} + +{{/* +Renders the set of all featureFlags secret variables +*/}} +{{- define "airbyte.featureFlags.secrets" }} +{{- $opt := (include "airbyte.featureFlags.client" .) }} + +{{- if eq $opt "configfile" }} +{{- end }} + +{{- if eq $opt "launchdarkly" }} +LAUNCHDARKLY_KEY: {{ include "airbyte.featureFlags.launchdarkly.key" . 
| quote }} +{{- end }} + +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_java.tpl b/charts/v2/airbyte/templates/config/_java.tpl new file mode 100644 index 00000000000..8cf92dfb65c --- /dev/null +++ b/charts/v2/airbyte/templates/config/_java.tpl @@ -0,0 +1,49 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Java Configuration +*/}} + +{{/* +Renders the java secret name +*/}} +{{- define "airbyte.java.secretName" }} +{{- if .Values.global.java.secretName }} + {{- .Values.global.java.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.java.opts value +*/}} +{{- define "airbyte.java.opts" }} + {{- join " " .Values.global.java.opts }} +{{- end }} + +{{/* +Renders the java.opts environment variable +*/}} +{{- define "airbyte.java.opts.env" }} +- name: JAVA_TOOL_OPTIONS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JAVA_TOOL_OPTIONS +{{- end }} + +{{/* +Renders the set of all java environment variables +*/}} +{{- define "airbyte.java.envs" }} +{{- include "airbyte.java.opts.env" . }} +{{- end }} + +{{/* +Renders the set of all java config map variables +*/}} +{{- define "airbyte.java.configVars" }} +JAVA_TOOL_OPTIONS: {{ join " " .Values.global.java.opts | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_jobs.tpl b/charts/v2/airbyte/templates/config/_jobs.tpl new file mode 100644 index 00000000000..c06335434c1 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_jobs.tpl @@ -0,0 +1,267 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Jobs Configuration +*/}} + +{{/* +Renders the jobs secret name +*/}} +{{- define "airbyte.jobs.secretName" }} +{{- if .Values.global.jobs.secretName }} + {{- .Values.global.jobs.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.jobs.kube.serviceAccount value +*/}} +{{- define "airbyte.jobs.kube.serviceAccount" }} + {{- .Values.global.serviceAccountName }} +{{- end }} + +{{/* +Renders the jobs.kube.serviceAccount environment variable +*/}} +{{- define "airbyte.jobs.kube.serviceAccount.env" }} +- name: JOB_KUBE_SERVICEACCOUNT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_SERVICEACCOUNT +{{- end }} + +{{/* +Renders the global.jobs.kube.namespace value +*/}} +{{- define "airbyte.jobs.kube.namespace" }} + {{- .Values.global.jobs.kube.namespace }} +{{- end }} + +{{/* +Renders the jobs.kube.namespace environment variable +*/}} +{{- define "airbyte.jobs.kube.namespace.env" }} +- name: JOB_KUBE_NAMESPACE + valueFrom: + fieldRef: + fieldPath: metadata.namespace + +{{- end }} + +{{/* +Renders the global.jobs.kube.localVolume.enabled value +*/}} +{{- define "airbyte.jobs.kube.localVolume.enabled" }} + {{- .Values.global.jobs.kube.localVolume.enabled | default false }} +{{- end }} + +{{/* +Renders the jobs.kube.localVolume.enabled environment variable +*/}} +{{- define "airbyte.jobs.kube.localVolume.enabled.env" }} +- name: JOB_KUBE_LOCAL_VOLUME_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_LOCAL_VOLUME_ENABLED +{{- end }} + +{{/* +Renders the global.jobs.kube.images.busybox value +*/}} +{{- define "airbyte.jobs.kube.images.busybox" }} + {{- include "imageUrl" (list .Values.global.jobs.kube.images.busybox $) }} +{{- end }} + +{{/* +Renders the jobs.kube.images.busybox environment variable +*/}} +{{- define 
"airbyte.jobs.kube.images.busybox.env" }} +- name: JOB_KUBE_BUSYBOX_IMAGE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_BUSYBOX_IMAGE +{{- end }} + +{{/* +Renders the global.jobs.kube.images.socat value +*/}} +{{- define "airbyte.jobs.kube.images.socat" }} + {{- include "imageUrl" (list .Values.global.jobs.kube.images.socat $) }} +{{- end }} + +{{/* +Renders the jobs.kube.images.socat environment variable +*/}} +{{- define "airbyte.jobs.kube.images.socat.env" }} +- name: JOB_KUBE_SOCAT_IMAGE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_SOCAT_IMAGE +{{- end }} + +{{/* +Renders the global.jobs.kube.images.curl value +*/}} +{{- define "airbyte.jobs.kube.images.curl" }} + {{- include "imageUrl" (list .Values.global.jobs.kube.images.curl $) }} +{{- end }} + +{{/* +Renders the jobs.kube.images.curl environment variable +*/}} +{{- define "airbyte.jobs.kube.images.curl.env" }} +- name: JOB_KUBE_CURL_IMAGE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_CURL_IMAGE +{{- end }} + +{{/* +Renders the global.jobs.kube.main_container_image_pull_secret value +*/}} +{{- define "airbyte.jobs.kube.main_container_image_pull_secret" }} + {{- $imagePullSecrets := (list) }} + {{- range $.Values.global.imagePullSecrets -}}{{- $imagePullSecrets = append $imagePullSecrets .name -}}{{- end }} + {{- if $.Values.global.jobs.kube.main_container_image_pull_secret }} + {{- $imagePullSecrets = append $imagePullSecrets $.Values.global.jobs.kube.main_container_image_pull_secret }} + {{- end }} + {{ join "," $imagePullSecrets }} +{{- end }} + +{{/* +Renders the jobs.kube.main_container_image_pull_secret environment variable +*/}} +{{- define "airbyte.jobs.kube.main_container_image_pull_secret.env" }} +- name: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET +{{- end }} + +{{/* +Renders the global.jobs.kube.annotations value +*/}} +{{- define "airbyte.jobs.kube.annotations" }} + {{- .Values.global.jobs.kube.annotations | include "airbyte.flattenMap" }} +{{- end }} + +{{/* +Renders the jobs.kube.annotations environment variable +*/}} +{{- define "airbyte.jobs.kube.annotations.env" }} +- name: JOB_KUBE_ANNOTATIONS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_ANNOTATIONS +{{- end }} + +{{/* +Renders the global.jobs.kube.labels value +*/}} +{{- define "airbyte.jobs.kube.labels" }} + {{- .Values.global.jobs.kube.labels | include "airbyte.flattenMap" }} +{{- end }} + +{{/* +Renders the jobs.kube.labels environment variable +*/}} +{{- define "airbyte.jobs.kube.labels.env" }} +- name: JOB_KUBE_LABELS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_LABELS +{{- end }} + +{{/* +Renders the global.jobs.kube.nodeSelector value +*/}} +{{- define "airbyte.jobs.kube.nodeSelector" }} + {{- .Values.global.jobs.kube.nodeSelector | include "airbyte.flattenMap" }} +{{- end }} + +{{/* +Renders the jobs.kube.nodeSelector environment variable +*/}} +{{- define "airbyte.jobs.kube.nodeSelector.env" }} +- name: JOB_KUBE_NODE_SELECTORS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_NODE_SELECTORS +{{- end }} + +{{/* +Renders the global.jobs.kube.tolerations value +*/}} +{{- define "airbyte.jobs.kube.tolerations" }} + {{- .Values.global.jobs.kube.tolerations | include "airbyte.flattenArrayMap" }} 
+{{- end }} + +{{/* +Renders the jobs.kube.tolerations environment variable +*/}} +{{- define "airbyte.jobs.kube.tolerations.env" }} +- name: JOB_KUBE_TOLERATIONS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_KUBE_TOLERATIONS +{{- end }} + +{{/* +Renders the jobs.errors.reportingStrategy environment variable +*/}} +{{- define "airbyte.jobs.errors.reportingStrategy.env" }} +- name: JOB_ERROR_REPORTING_STRATEGY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_ERROR_REPORTING_STRATEGY +{{- end }} + +{{/* +Renders the set of all jobs environment variables +*/}} +{{- define "airbyte.jobs.envs" }} +{{- include "airbyte.jobs.kube.serviceAccount.env" . }} +{{- include "airbyte.jobs.kube.namespace.env" . }} +{{- include "airbyte.jobs.kube.localVolume.enabled.env" . }} +{{- include "airbyte.jobs.kube.images.busybox.env" . }} +{{- include "airbyte.jobs.kube.images.socat.env" . }} +{{- include "airbyte.jobs.kube.images.curl.env" . }} +{{- include "airbyte.jobs.kube.main_container_image_pull_secret.env" . }} +{{- include "airbyte.jobs.kube.annotations.env" . }} +{{- include "airbyte.jobs.kube.labels.env" . }} +{{- include "airbyte.jobs.kube.nodeSelector.env" . }} +{{- include "airbyte.jobs.kube.tolerations.env" . }} +{{- include "airbyte.jobs.errors.reportingStrategy.env" . }} +{{- end }} + +{{/* +Renders the set of all jobs config map variables +*/}} +{{- define "airbyte.jobs.configVars" }} +JOB_KUBE_SERVICEACCOUNT: {{ .Values.global.serviceAccountName | quote }} +JOB_KUBE_NAMESPACE: {{ include "airbyte.jobs.kube.namespace" . | quote }} +JOB_KUBE_LOCAL_VOLUME_ENABLED: {{ include "airbyte.jobs.kube.localVolume.enabled" . | quote }} +JOB_KUBE_BUSYBOX_IMAGE: {{ include "imageUrl" (list .Values.global.jobs.kube.images.busybox $) | quote }} +JOB_KUBE_SOCAT_IMAGE: {{ include "imageUrl" (list .Values.global.jobs.kube.images.socat $) | quote }} +JOB_KUBE_CURL_IMAGE: {{ include "imageUrl" (list .Values.global.jobs.kube.images.curl $) | quote }} +JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET: {{ include "airbyte.jobs.kube.main_container_image_pull_secret" . | quote }} +JOB_KUBE_ANNOTATIONS: {{ .Values.global.jobs.kube.annotations | include "airbyte.flattenMap" | quote }} +JOB_KUBE_LABELS: {{ .Values.global.jobs.kube.labels | include "airbyte.flattenMap" | quote }} +JOB_KUBE_NODE_SELECTORS: {{ .Values.global.jobs.kube.nodeSelector | include "airbyte.flattenMap" | quote }} +JOB_KUBE_TOLERATIONS: {{ .Values.global.jobs.kube.tolerations | include "airbyte.flattenArrayMap" | quote }} +JOB_ERROR_REPORTING_STRATEGY: {{ "logging" | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_keycloak.tpl b/charts/v2/airbyte/templates/config/_keycloak.tpl new file mode 100644 index 00000000000..3af3e843ec8 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_keycloak.tpl @@ -0,0 +1,454 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
*/}} + +{{/* + Keycloak Configuration +*/}} + +{{/* +Renders the keycloak.service.port value +*/}} +{{- define "airbyte.keycloak.service.port" }} + {{- .Values.keycloak.service.port | default 8081 }} +{{- end }} + +{{/* +Renders the keycloak.service.port environment variable +*/}} +{{- define "airbyte.keycloak.service.port.env" }} +- name: KEYCLOAK_PORT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_PORT +{{- end }} + +{{/* +Renders the keycloak.javaOpts value +*/}} +{{- define "airbyte.keycloak.javaOpts" }} + {{- (printf "-Djgroups.dns.query=%s-airbyte-keycloak-headless-svc" .Release.Name) }} +{{- end }} + +{{/* +Renders the keycloak.javaOpts environment variable +*/}} +{{- define "airbyte.keycloak.javaOpts.env" }} +- name: JAVA_OPTS_APPEND + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JAVA_OPTS_APPEND +{{- end }} + +{{/* +Renders the set of all keycloak environment variables +*/}} +{{- define "airbyte.keycloak.envs" }} +{{- include "airbyte.keycloak.service.port.env" . }} +{{- include "airbyte.keycloak.javaOpts.env" . }} +{{- end }} + +{{/* +Renders the set of all keycloak config map variables +*/}} +{{- define "airbyte.keycloak.configVars" }} +KEYCLOAK_PORT: {{ include "airbyte.keycloak.service.port" . | quote }} +JAVA_OPTS_APPEND: {{ (printf "-Djgroups.dns.query=%s-airbyte-keycloak-headless-svc" .Release.Name) | quote }} +{{- end }} + +{{/* +Renders the keycloak.admin.client secret name +*/}} +{{- define "airbyte.keycloak.admin.client.secretName" }} +{{- if .Values.keycloak.secretName }} + {{- .Values.keycloak.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the keycloak.auth.adminRealm value +*/}} +{{- define "airbyte.keycloak.admin.client.auth.adminRealm" }} + {{- .Values.keycloak.auth.adminRealm }} +{{- end }} + +{{/* +Renders the keycloak.admin.client.auth.adminRealm environment variable +*/}} +{{- define "airbyte.keycloak.admin.client.auth.adminRealm.env" }} +- name: KEYCLOAK_ADMIN_REALM + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_ADMIN_REALM +{{- end }} + +{{/* +Renders the keycloak.auth.adminCliClientId value +*/}} +{{- define "airbyte.keycloak.admin.client.auth.adminCliClientId" }} + {{- .Values.keycloak.auth.adminCliClientId }} +{{- end }} + +{{/* +Renders the keycloak.admin.client.auth.adminCliClientId environment variable +*/}} +{{- define "airbyte.keycloak.admin.client.auth.adminCliClientId.env" }} +- name: KEYCLOAK_ADMIN_CLI_CLIENT_ID + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_ADMIN_CLI_CLIENT_ID +{{- end }} + +{{/* +Renders the keycloak.clientRealm value +*/}} +{{- define "airbyte.keycloak.admin.client.clientRealm" }} + {{- .Values.keycloak.clientRealm | default "_airbyte-application-clients" }} +{{- end }} + +{{/* +Renders the keycloak.admin.client.clientRealm environment variable +*/}} +{{- define "airbyte.keycloak.admin.client.clientRealm.env" }} +- name: KEYCLOAK_CLIENT_REALM + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_CLIENT_REALM +{{- end }} + +{{/* +Renders the keycloak.internalBasePath value +*/}} +{{- define "airbyte.keycloak.admin.client.internalBasePath" }} + {{- .Values.keycloak.internalBasePath | default "/auth" }} +{{- end }} + +{{/* +Renders the keycloak.admin.client.internalBasePath environment variable +*/}} +{{- define "airbyte.keycloak.admin.client.internalBasePath.env" }} +- name: 
KEYCLOAK_INTERNAL_BASE_PATH + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_INTERNAL_BASE_PATH +{{- end }} + +{{/* +Renders the keycloak.internalHost value +*/}} +{{- define "airbyte.keycloak.admin.client.internalHost" }} + {{- ternary (printf "%s-airbyte-keycloak-svc:%d" .Release.Name (int .Values.keycloak.service.port)) "localhost" (or (eq .Values.global.edition "enterprise") (eq .Values.global.edition "pro")) }} +{{- end }} + +{{/* +Renders the keycloak.admin.client.internalHost environment variable +*/}} +{{- define "airbyte.keycloak.admin.client.internalHost.env" }} +- name: KEYCLOAK_INTERNAL_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_INTERNAL_HOST +{{- end }} + +{{/* +Renders the keycloak.admin.client.internalProtocol environment variable +*/}} +{{- define "airbyte.keycloak.admin.client.internalProtocol.env" }} +- name: KEYCLOAK_INTERNAL_PROTOCOL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_INTERNAL_PROTOCOL +{{- end }} + +{{/* +Renders the keycloak.realmIssuer value +*/}} +{{- define "airbyte.keycloak.admin.client.realmIssuer" }} + {{- ternary (printf "%s/auth/realms/_airbyte-internal" .Values.global.airbyteUrl) (printf "%s-airbyte-keycloak-svc:8001/auth/realms/_airbyte-internal" .Release.Name) (eq (include "airbyte.common.cluster.type" .) "data-plane") }} +{{- end }} + +{{/* +Renders the keycloak.admin.client.realmIssuer environment variable +*/}} +{{- define "airbyte.keycloak.admin.client.realmIssuer.env" }} +- name: KEYCLOAK_INTERNAL_REALM_ISSUER + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_INTERNAL_REALM_ISSUER +{{- end }} + +{{/* +Renders the set of all keycloak.admin.client environment variables +*/}} +{{- define "airbyte.keycloak.admin.client.envs" }} +{{- include "airbyte.keycloak.admin.client.auth.adminRealm.env" . }} +{{- include "airbyte.keycloak.admin.client.auth.adminCliClientId.env" . }} +{{- include "airbyte.keycloak.admin.client.clientRealm.env" . }} +{{- include "airbyte.keycloak.admin.client.internalBasePath.env" . }} +{{- include "airbyte.keycloak.admin.client.internalHost.env" . }} +{{- include "airbyte.keycloak.admin.client.internalProtocol.env" . }} +{{- include "airbyte.keycloak.admin.client.realmIssuer.env" . }} +{{- end }} + +{{/* +Renders the set of all keycloak.admin.client config map variables +*/}} +{{- define "airbyte.keycloak.admin.client.configVars" }} +KEYCLOAK_ADMIN_REALM: {{ include "airbyte.keycloak.admin.client.auth.adminRealm" . | quote }} +KEYCLOAK_ADMIN_CLI_CLIENT_ID: {{ include "airbyte.keycloak.admin.client.auth.adminCliClientId" . | quote }} +KEYCLOAK_CLIENT_REALM: {{ include "airbyte.keycloak.admin.client.clientRealm" . | quote }} +KEYCLOAK_INTERNAL_BASE_PATH: {{ include "airbyte.keycloak.admin.client.internalBasePath" . | quote }} +KEYCLOAK_INTERNAL_HOST: {{ ternary (printf "%s-airbyte-keycloak-svc:%d" .Release.Name (int .Values.keycloak.service.port)) "localhost" (or (eq .Values.global.edition "enterprise") (eq .Values.global.edition "pro")) | quote }} +KEYCLOAK_INTERNAL_PROTOCOL: {{ "http" | quote }} +KEYCLOAK_INTERNAL_REALM_ISSUER: {{ ternary (printf "%s/auth/realms/_airbyte-internal" .Values.global.airbyteUrl) (printf "%s-airbyte-keycloak-svc:8001/auth/realms/_airbyte-internal" .Release.Name) (eq (include "airbyte.common.cluster.type" .) 
"data-plane") | quote }} +{{- end }} + +{{/* +Renders the keycloak.admin.user secret name +*/}} +{{- define "airbyte.keycloak.admin.user.secretName" }} +{{- if .Values.keycloak.secretName }} + {{- .Values.keycloak.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the keycloak.auth.adminUsername value +*/}} +{{- define "airbyte.keycloak.admin.user.auth.adminUsername" }} + {{- .Values.keycloak.auth.adminUsername }} +{{- end }} + +{{/* +Renders the keycloak.admin.user.auth.adminUsername secret key +*/}} +{{- define "airbyte.keycloak.admin.user.auth.adminUsername.secretKey" }} + {{- .Values.keycloak.auth.adminUsernameSecretKey | default "KEYCLOAK_ADMIN_USER" }} +{{- end }} + +{{/* +Renders the keycloak.admin.user.auth.adminUsername environment variable +*/}} +{{- define "airbyte.keycloak.admin.user.auth.adminUsername.env" }} +- name: KEYCLOAK_ADMIN_USER + valueFrom: + secretKeyRef: + name: {{ include "airbyte.keycloak.admin.user.secretName" . }} + key: {{ include "airbyte.keycloak.admin.user.auth.adminUsername.secretKey" . }} +{{- end }} + +{{/* +Renders the keycloak.auth.adminPassword value +*/}} +{{- define "airbyte.keycloak.admin.user.auth.adminPassword" }} + {{- .Values.keycloak.auth.adminPassword }} +{{- end }} + +{{/* +Renders the keycloak.admin.user.auth.adminPassword secret key +*/}} +{{- define "airbyte.keycloak.admin.user.auth.adminPassword.secretKey" }} + {{- .Values.keycloak.auth.adminPasswordSecretKey | default "KEYCLOAK_ADMIN_PASSWORD" }} +{{- end }} + +{{/* +Renders the keycloak.admin.user.auth.adminPassword environment variable +*/}} +{{- define "airbyte.keycloak.admin.user.auth.adminPassword.env" }} +- name: KEYCLOAK_ADMIN_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "airbyte.keycloak.admin.user.secretName" . }} + key: {{ include "airbyte.keycloak.admin.user.auth.adminPassword.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all keycloak.admin.user environment variables +*/}} +{{- define "airbyte.keycloak.admin.user.envs" }} +{{- include "airbyte.keycloak.admin.user.auth.adminUsername.env" . }} +{{- include "airbyte.keycloak.admin.user.auth.adminPassword.env" . }} +{{- end }} + +{{/* +Renders the set of all keycloak.admin.user secret variables +*/}} +{{- define "airbyte.keycloak.admin.user.secrets" }} +KEYCLOAK_ADMIN_USER: {{ include "airbyte.keycloak.admin.user.auth.adminUsername" . | quote }} +KEYCLOAK_ADMIN_PASSWORD: {{ include "airbyte.keycloak.admin.user.auth.adminPassword" . 
| quote }} +{{- end }} + +{{/* +Renders the keycloak.database secret name +*/}} +{{- define "airbyte.keycloak.database.secretName" }} +{{- if .Values.keycloak.database.secretName }} + {{- .Values.keycloak.database.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the keycloak.database.name value +*/}} +{{- define "airbyte.keycloak.database.name" }} + {{- .Values.keycloak.database.name | default "db-airbyte" }} +{{- end }} + +{{/* +Renders the keycloak.database.name environment variable +*/}} +{{- define "airbyte.keycloak.database.name.env" }} +- name: KEYCLOAK_DATABASE_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_DATABASE_NAME +{{- end }} + +{{/* +Renders the keycloak.database.host value +*/}} +{{- define "airbyte.keycloak.database.host" }} + {{- .Values.keycloak.database.host | default (printf "airbyte-db-svc.%s.svc.cluster.local" .Release.Namespace) }} +{{- end }} + +{{/* +Renders the keycloak.database.host environment variable +*/}} +{{- define "airbyte.keycloak.database.host.env" }} +- name: KEYCLOAK_DATABASE_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_DATABASE_HOST +{{- end }} + +{{/* +Renders the keycloak.database.port value +*/}} +{{- define "airbyte.keycloak.database.port" }} + {{- .Values.keycloak.database.port | default 5432 }} +{{- end }} + +{{/* +Renders the keycloak.database.port environment variable +*/}} +{{- define "airbyte.keycloak.database.port.env" }} +- name: KEYCLOAK_DATABASE_PORT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_DATABASE_PORT +{{- end }} + +{{/* +Renders the keycloak.database.user value +*/}} +{{- define "airbyte.keycloak.database.user" }} + {{- .Values.keycloak.database.user | default "airbyte" }} +{{- end }} + +{{/* +Renders the keycloak.database.user secret key +*/}} +{{- define "airbyte.keycloak.database.user.secretKey" }} + {{- .Values.keycloak.database.userSecretKey | default "KEYCLOAK_DATABASE_USERNAME" }} +{{- end }} + +{{/* +Renders the keycloak.database.user environment variable +*/}} +{{- define "airbyte.keycloak.database.user.env" }} +- name: KEYCLOAK_DATABASE_USERNAME + valueFrom: + secretKeyRef: + name: {{ include "airbyte.keycloak.database.secretName" . }} + key: {{ include "airbyte.keycloak.database.user.secretKey" . }} +{{- end }} + +{{/* +Renders the keycloak.database.password value +*/}} +{{- define "airbyte.keycloak.database.password" }} + {{- .Values.keycloak.database.password | default "airbyte" }} +{{- end }} + +{{/* +Renders the keycloak.database.password secret key +*/}} +{{- define "airbyte.keycloak.database.password.secretKey" }} + {{- .Values.keycloak.database.passwordSecretKey | default "KEYCLOAK_DATABASE_PASSWORD" }} +{{- end }} + +{{/* +Renders the keycloak.database.password environment variable +*/}} +{{- define "airbyte.keycloak.database.password.env" }} +- name: KEYCLOAK_DATABASE_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "airbyte.keycloak.database.secretName" . }} + key: {{ include "airbyte.keycloak.database.password.secretKey" . }} +{{- end }} + +{{/* +Renders the keycloak.database.url value +*/}} +{{- define "airbyte.keycloak.database.url" }} + {{- (printf "jdbc:postgresql://%s:%d/%s?currentSchema=keycloak" (include "airbyte.keycloak.database.host" .) 
(int (include "airbyte.keycloak.database.port" .)) (include "airbyte.keycloak.database.name" .)) }} +{{- end }} + +{{/* +Renders the keycloak.database.url environment variable +*/}} +{{- define "airbyte.keycloak.database.url.env" }} +- name: KEYCLOAK_DATABASE_URL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KEYCLOAK_DATABASE_URL +{{- end }} + +{{/* +Renders the set of all keycloak.database environment variables +*/}} +{{- define "airbyte.keycloak.database.envs" }} +{{- include "airbyte.keycloak.database.name.env" . }} +{{- include "airbyte.keycloak.database.host.env" . }} +{{- include "airbyte.keycloak.database.port.env" . }} +{{- include "airbyte.keycloak.database.user.env" . }} +{{- include "airbyte.keycloak.database.password.env" . }} +{{- include "airbyte.keycloak.database.url.env" . }} +{{- end }} + +{{/* +Renders the set of all keycloak.database config map variables +*/}} +{{- define "airbyte.keycloak.database.configVars" }} +KEYCLOAK_DATABASE_NAME: {{ include "airbyte.keycloak.database.name" . | quote }} +KEYCLOAK_DATABASE_HOST: {{ include "airbyte.keycloak.database.host" . | quote }} +KEYCLOAK_DATABASE_PORT: {{ include "airbyte.keycloak.database.port" . | quote }} +KEYCLOAK_DATABASE_URL: {{ (printf "jdbc:postgresql://%s:%d/%s?currentSchema=keycloak" (include "airbyte.keycloak.database.host" .) (int (include "airbyte.keycloak.database.port" .)) (include "airbyte.keycloak.database.name" .)) | quote }} +{{- end }} + +{{/* +Renders the set of all keycloak.database secret variables +*/}} +{{- define "airbyte.keycloak.database.secrets" }} +KEYCLOAK_DATABASE_USERNAME: {{ include "airbyte.keycloak.database.user" . | quote }} +KEYCLOAK_DATABASE_PASSWORD: {{ include "airbyte.keycloak.database.password" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_logging.tpl b/charts/v2/airbyte/templates/config/_logging.tpl new file mode 100644 index 00000000000..47e3cc1b359 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_logging.tpl @@ -0,0 +1,49 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Logging Configuration +*/}} + +{{/* +Renders the logging secret name +*/}} +{{- define "airbyte.logging.secretName" }} +{{- if .Values.global.logging.secretName }} + {{- .Values.global.logging.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.logging.level value +*/}} +{{- define "airbyte.logging.level" }} + {{- .Values.global.logging.level | default "INFO" }} +{{- end }} + +{{/* +Renders the logging.level environment variable +*/}} +{{- define "airbyte.logging.level.env" }} +- name: LOG_LEVEL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: LOG_LEVEL +{{- end }} + +{{/* +Renders the set of all logging environment variables +*/}} +{{- define "airbyte.logging.envs" }} +{{- include "airbyte.logging.level.env" . }} +{{- end }} + +{{/* +Renders the set of all logging config map variables +*/}} +{{- define "airbyte.logging.configVars" }} +LOG_LEVEL: {{ include "airbyte.logging.level" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_metrics.tpl b/charts/v2/airbyte/templates/config/_metrics.tpl new file mode 100644 index 00000000000..7b84fce3a2c --- /dev/null +++ b/charts/v2/airbyte/templates/config/_metrics.tpl @@ -0,0 +1,149 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
*/}} + +{{/* + Metrics Configuration +*/}} + +{{/* +Renders the metrics secret name +*/}} +{{- define "airbyte.metrics.secretName" }} +{{- if .Values.global.metrics.secretName }} + {{- .Values.global.metrics.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.metrics.client value +*/}} +{{- define "airbyte.metrics.client" }} + {{- .Values.global.metrics.client }} +{{- end }} + +{{/* +Renders the metrics.client environment variable +*/}} +{{- define "airbyte.metrics.client.env" }} +- name: METRIC_CLIENT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: METRIC_CLIENT +{{- end }} + +{{/* +Renders the global.metrics.micrometer.enabled value +*/}} +{{- define "airbyte.metrics.micrometer.enabled" }} + {{- .Values.global.metrics.micrometer.enabled | default false }} +{{- end }} + +{{/* +Renders the metrics.micrometer.enabled environment variable +*/}} +{{- define "airbyte.metrics.micrometer.enabled.env" }} +- name: MICROMETER_METRICS_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: MICROMETER_METRICS_ENABLED +{{- end }} + +{{/* +Renders the global.metrics.micrometer.statsdFlavor value +*/}} +{{- define "airbyte.metrics.micrometer.statsdFlavor" }} + {{- .Values.global.metrics.micrometer.statsdFlavor | default "datadog" }} +{{- end }} + +{{/* +Renders the metrics.micrometer.statsdFlavor environment variable +*/}} +{{- define "airbyte.metrics.micrometer.statsdFlavor.env" }} +- name: MICROMETER_METRICS_STATSD_FLAVOR + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: MICROMETER_METRICS_STATSD_FLAVOR +{{- end }} + +{{/* +Renders the global.metrics.otel.collector.endpoint value +*/}} +{{- define "airbyte.metrics.otel.collector.endpoint" }} + {{- .Values.global.metrics.otel.collector.endpoint }} +{{- end }} + +{{/* +Renders the metrics.otel.collector.endpoint environment variable +*/}} +{{- define "airbyte.metrics.otel.collector.endpoint.env" }} +- name: OTEL_COLLECTOR_ENDPOINT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OTEL_COLLECTOR_ENDPOINT +{{- end }} + +{{/* +Renders the global.metrics.statsd.host value +*/}} +{{- define "airbyte.metrics.statsd.host" }} + {{- .Values.global.metrics.statsd.host | default "localhost" }} +{{- end }} + +{{/* +Renders the metrics.statsd.host environment variable +*/}} +{{- define "airbyte.metrics.statsd.host.env" }} +- name: STATSD_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: STATSD_HOST +{{- end }} + +{{/* +Renders the global.metrics.statsd.port value +*/}} +{{- define "airbyte.metrics.statsd.port" }} + {{- .Values.global.metrics.statsd.port | default 8125 }} +{{- end }} + +{{/* +Renders the metrics.statsd.port environment variable +*/}} +{{- define "airbyte.metrics.statsd.port.env" }} +- name: STATSD_PORT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: STATSD_PORT +{{- end }} + +{{/* +Renders the set of all metrics environment variables +*/}} +{{- define "airbyte.metrics.envs" }} +{{- include "airbyte.metrics.client.env" . }} +{{- include "airbyte.metrics.micrometer.enabled.env" . }} +{{- include "airbyte.metrics.micrometer.statsdFlavor.env" . }} +{{- include "airbyte.metrics.otel.collector.endpoint.env" . }} +{{- include "airbyte.metrics.statsd.host.env" . }} +{{- include "airbyte.metrics.statsd.port.env" . 
}} +{{- end }} + +{{/* +Renders the set of all metrics config map variables +*/}} +{{- define "airbyte.metrics.configVars" }} +METRIC_CLIENT: {{ include "airbyte.metrics.client" . | quote }} +MICROMETER_METRICS_ENABLED: {{ include "airbyte.metrics.micrometer.enabled" . | quote }} +MICROMETER_METRICS_STATSD_FLAVOR: {{ include "airbyte.metrics.micrometer.statsdFlavor" . | quote }} +OTEL_COLLECTOR_ENDPOINT: {{ include "airbyte.metrics.otel.collector.endpoint" . | quote }} +STATSD_HOST: {{ include "airbyte.metrics.statsd.host" . | quote }} +STATSD_PORT: {{ include "airbyte.metrics.statsd.port" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_micronaut.tpl b/charts/v2/airbyte/templates/config/_micronaut.tpl new file mode 100644 index 00000000000..02d8b8dc12b --- /dev/null +++ b/charts/v2/airbyte/templates/config/_micronaut.tpl @@ -0,0 +1,49 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Micronaut Configuration +*/}} + +{{/* +Renders the micronaut secret name +*/}} +{{- define "airbyte.micronaut.secretName" }} +{{- if .Values.global.micronaut.secretName }} + {{- .Values.global.micronaut.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.micronaut.environments value +*/}} +{{- define "airbyte.micronaut.environments" }} + {{- join "," (append .Values.global.micronaut.environments (include "airbyte.common.cluster.type" .)) }} +{{- end }} + +{{/* +Renders the micronaut.environments environment variable +*/}} +{{- define "airbyte.micronaut.environments.env" }} +- name: MICRONAUT_ENVIRONMENTS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: MICRONAUT_ENVIRONMENTS +{{- end }} + +{{/* +Renders the set of all micronaut environment variables +*/}} +{{- define "airbyte.micronaut.envs" }} +{{- include "airbyte.micronaut.environments.env" . }} +{{- end }} + +{{/* +Renders the set of all micronaut config map variables +*/}} +{{- define "airbyte.micronaut.configVars" }} +MICRONAUT_ENVIRONMENTS: {{ join "," (append .Values.global.micronaut.environments (include "airbyte.common.cluster.type" .)) | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_minio.tpl b/charts/v2/airbyte/templates/config/_minio.tpl new file mode 100644 index 00000000000..e0425907169 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_minio.tpl @@ -0,0 +1,50 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Minio Configuration +*/}} + +{{/* +Renders the minio.rootUser secret key +*/}} +{{- define "airbyte.minio.rootUser.secretKey" }} + {{- .Values.minio.rootUserSecretKey | default "AWS_ACCESS_KEY_ID" }} +{{- end }} + +{{/* +Renders the minio.rootUser environment variable +*/}} +{{- define "airbyte.minio.rootUser.env" }} +- name: MINIO_ROOT_USER + valueFrom: + secretKeyRef: + name: {{ include "airbyte.storage.secretName" . }} + key: {{ include "airbyte.minio.rootUser.secretKey" . }} +{{- end }} + +{{/* +Renders the minio.rootPassword secret key +*/}} +{{- define "airbyte.minio.rootPassword.secretKey" }} + {{- .Values.minio.rootPasswordSecretKey | default "AWS_SECRET_ACCESS_KEY" }} +{{- end }} + +{{/* +Renders the minio.rootPassword environment variable +*/}} +{{- define "airbyte.minio.rootPassword.env" }} +- name: MINIO_ROOT_PASSWORD + valueFrom: + secretKeyRef: + name: {{ include "airbyte.storage.secretName" . }} + key: {{ include "airbyte.minio.rootPassword.secretKey" . 
}} +{{- end }} + +{{/* +Renders the set of all minio environment variables +*/}} +{{- define "airbyte.minio.envs" }} +{{- include "airbyte.minio.rootUser.env" . }} +{{- include "airbyte.minio.rootPassword.env" . }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_otel.tpl b/charts/v2/airbyte/templates/config/_otel.tpl new file mode 100644 index 00000000000..2054701e4a7 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_otel.tpl @@ -0,0 +1,114 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Otel Configuration +*/}} + +{{/* +Renders the otel secret name +*/}} +{{- define "airbyte.otel.secretName" }} +{{- if .Values.global.metrics.otel.secretName }} + {{- .Values.global.metrics.otel.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the otel.otel.exporter environment variable +*/}} +{{- define "airbyte.otel.otel.exporter.env" }} +- name: OTEL_EXPORTER_OTLP_ENDPOINT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OTEL_EXPORTER_OTLP_ENDPOINT +{{- end }} + +{{/* +Renders the otel.otel.exporter.protocol environment variable +*/}} +{{- define "airbyte.otel.otel.exporter.protocol.env" }} +- name: OTEL_EXPORTER_OTLP_PROTOCOL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OTEL_EXPORTER_OTLP_PROTOCOL +{{- end }} + +{{/* +Renders the otel.otel.exporter.timeout environment variable +*/}} +{{- define "airbyte.otel.otel.exporter.timeout.env" }} +- name: OTEL_EXPORTER_OTLP_TIMEOUT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OTEL_EXPORTER_OTLP_TIMEOUT +{{- end }} + +{{/* +Renders the otel.otel.exporter.metricExportInterval environment variable +*/}} +{{- define "airbyte.otel.otel.exporter.metricExportInterval.env" }} +- name: OTEL_METRIC_EXPORT_INTERVAL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OTEL_METRIC_EXPORT_INTERVAL +{{- end }} + +{{/* +Renders the otel.otel.exporter.name environment variable +*/}} +{{- define "airbyte.otel.otel.exporter.name.env" }} +- name: OTEL_METRICS_EXPORTER + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OTEL_METRICS_EXPORTER +{{- end }} + +{{/* +Renders the global.metrics.otel.otel.resourceAttributes value +*/}} +{{- define "airbyte.otel.otel.resourceAttributes" }} + {{- (printf "service.name=%s,deployment.environment=%s,service.version=%s" (include "airbyte.componentName" .) .Values.global.env (include "airbyte.common.version" .)) }} +{{- end }} + +{{/* +Renders the otel.otel.resourceAttributes environment variable +*/}} +{{- define "airbyte.otel.otel.resourceAttributes.env" }} +- name: OTEL_RESOURCE_ATTRIBUTES + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: OTEL_RESOURCE_ATTRIBUTES +{{- end }} + +{{/* +Renders the set of all otel environment variables +*/}} +{{- define "airbyte.otel.envs" }} +{{- include "airbyte.otel.otel.exporter.env" . }} +{{- include "airbyte.otel.otel.exporter.protocol.env" . }} +{{- include "airbyte.otel.otel.exporter.timeout.env" . }} +{{- include "airbyte.otel.otel.exporter.metricExportInterval.env" . }} +{{- include "airbyte.otel.otel.exporter.name.env" . }} +{{- include "airbyte.otel.otel.resourceAttributes.env" . 
}} +{{- end }} + +{{/* +Renders the set of all otel config map variables +*/}} +{{- define "airbyte.otel.configVars" }} +OTEL_EXPORTER_OTLP_ENDPOINT: {{ "http://$(DD_AGENT_HOST):4317" | quote }} +OTEL_EXPORTER_OTLP_PROTOCOL: {{ "grpc" | quote }} +OTEL_EXPORTER_OTLP_TIMEOUT: {{ 30000 | quote }} +OTEL_METRIC_EXPORT_INTERVAL: {{ 10000 | quote }} +OTEL_METRICS_EXPORTER: {{ "otlp" | quote }} +OTEL_RESOURCE_ATTRIBUTES: {{ (printf "service.name=%s,deployment.environment=%s,service.version=%s" (include "airbyte.componentName" .) .Values.global.env (include "airbyte.common.version" .)) | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_secretsManager.tpl b/charts/v2/airbyte/templates/config/_secretsManager.tpl new file mode 100644 index 00000000000..fc22df652df --- /dev/null +++ b/charts/v2/airbyte/templates/config/_secretsManager.tpl @@ -0,0 +1,508 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Secretsmanager Configuration +*/}} + +{{/* +Renders the secretsManager secret name +*/}} +{{- define "airbyte.secretsManager.secretName" }} +{{- if .Values.global.secretsManager.secretName }} + {{- .Values.global.secretsManager.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.secretsManager.type value +*/}} +{{- define "airbyte.secretsManager.type" }} + {{- .Values.global.secretsManager.type }} +{{- end }} + +{{/* +Renders the secretsManager.type environment variable +*/}} +{{- define "airbyte.secretsManager.type.env" }} +- name: SECRET_PERSISTENCE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SECRET_PERSISTENCE +{{- end }} + +{{/* +Renders the global.secretsManager.awsSecretManager.accessKeyId value +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.accessKeyId" }} + {{- .Values.global.secretsManager.awsSecretManager.accessKeyId }} +{{- end }} + +{{/* +Renders the secretsManager.awsSecretManager.accessKeyId secret key +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.accessKeyId.secretKey" }} + {{- .Values.global.secretsManager.awsSecretManager.accessKeyIdSecretKey | default "AWS_SECRET_MANAGER_ACCESS_KEY_ID" }} +{{- end }} + +{{/* +Renders the secretsManager.awsSecretManager.accessKeyId environment variable +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.accessKeyId.env" }} +- name: AWS_SECRET_MANAGER_ACCESS_KEY_ID + valueFrom: + secretKeyRef: + name: {{ include "airbyte.secretsManager.secretName" . }} + key: {{ include "airbyte.secretsManager.awsSecretManager.accessKeyId.secretKey" . }} +{{- end }} + +{{/* +Renders the global.secretsManager.awsSecretManager.secretAccessKey value +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.secretAccessKey" }} + {{- .Values.global.secretsManager.awsSecretManager.secretAccessKey }} +{{- end }} + +{{/* +Renders the secretsManager.awsSecretManager.secretAccessKey secret key +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.secretAccessKey.secretKey" }} + {{- .Values.global.secretsManager.awsSecretManager.secretAccessKeySecretKey | default "AWS_SECRET_MANAGER_SECRET_ACCESS_KEY" }} +{{- end }} + +{{/* +Renders the secretsManager.awsSecretManager.secretAccessKey environment variable +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.secretAccessKey.env" }} +- name: AWS_SECRET_MANAGER_SECRET_ACCESS_KEY + valueFrom: + secretKeyRef: + name: {{ include "airbyte.secretsManager.secretName" . 
}} + key: {{ include "airbyte.secretsManager.awsSecretManager.secretAccessKey.secretKey" . }} +{{- end }} + +{{/* +Renders the global.secretsManager.awsSecretManager.region value +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.region" }} + {{- .Values.global.secretsManager.awsSecretManager.region }} +{{- end }} + +{{/* +Renders the secretsManager.awsSecretManager.region environment variable +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.region.env" }} +- name: AWS_SECRET_MANAGER_REGION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AWS_SECRET_MANAGER_REGION +{{- end }} + +{{/* +Renders the global.secretsManager.awsSecretManager.tags value +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.tags" }} + {{- include "airbyte.tagsToString" .Values.global.secretsManager.awsSecretManager.tags }} +{{- end }} + +{{/* +Renders the secretsManager.awsSecretManager.tags environment variable +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.tags.env" }} +- name: AWS_SECRET_MANAGER_SECRET_TAGS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AWS_SECRET_MANAGER_SECRET_TAGS +{{- end }} + +{{/* +Renders the global.secretsManager.awsSecretManager.kmsArn value +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.kmsArn" }} + {{- .Values.global.secretsManager.awsSecretManager.kmsArn | default "" }} +{{- end }} + +{{/* +Renders the secretsManager.awsSecretManager.kmsArn environment variable +*/}} +{{- define "airbyte.secretsManager.awsSecretManager.kmsArn.env" }} +- name: AWS_KMS_KEY_ARN + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AWS_KMS_KEY_ARN +{{- end }} + +{{/* +Renders the global.secretsManager.azureKeyVault.clientId value +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientId" }} + {{- .Values.global.secretsManager.azureKeyVault.clientId }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.clientId secret key +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientId.secretKey" }} + {{- .Values.global.secretsManager.azureKeyVault.clientIdSecretKey | default "AB_AZURE_KEY_VAULT_CLIENT_ID" }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.clientId environment variable +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientId.env" }} +- name: AB_AZURE_KEY_VAULT_CLIENT_ID + valueFrom: + secretKeyRef: + name: {{ include "airbyte.secretsManager.secretName" . }} + key: {{ include "airbyte.secretsManager.azureKeyVault.clientId.secretKey" . }} +{{- end }} + +{{/* +Renders the global.secretsManager.azureKeyVault.clientSecret value +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientSecret" }} + {{- .Values.global.secretsManager.azureKeyVault.clientSecret }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.clientSecret secret key +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientSecret.secretKey" }} + {{- .Values.global.secretsManager.azureKeyVault.clientSecretSecretKey | default "AB_AZURE_KEY_VAULT_CLIENT_SECRET" }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.clientSecret environment variable +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientSecret.env" }} +- name: AB_AZURE_KEY_VAULT_CLIENT_SECRET + valueFrom: + secretKeyRef: + name: {{ include "airbyte.secretsManager.secretName" . }} + key: {{ include "airbyte.secretsManager.azureKeyVault.clientSecret.secretKey" . 
}} +{{- end }} + +{{/* +Renders the global.secretsManager.azureKeyVault.tenantId value +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.tenantId" }} + {{- .Values.global.secretsManager.azureKeyVault.tenantId }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.tenantId environment variable +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.tenantId.env" }} +- name: AB_AZURE_KEY_VAULT_TENANT_ID + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_AZURE_KEY_VAULT_TENANT_ID +{{- end }} + +{{/* +Renders the global.secretsManager.azureKeyVault.vaultUrl value +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.vaultUrl" }} + {{- .Values.global.secretsManager.azureKeyVault.vaultUrl }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.vaultUrl environment variable +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.vaultUrl.env" }} +- name: AB_AZURE_KEY_VAULT_VAULT_URL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_AZURE_KEY_VAULT_VAULT_URL +{{- end }} + +{{/* +Renders the global.secretsManager.azureKeyVault.clientIdRefName value +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientIdRefName" }} + {{- include "airbyte.secretsManager.secretName" . }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.clientIdRefName environment variable +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientIdRefName.env" }} +- name: AB_AZURE_KEY_CLIENT_ID_REF_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_AZURE_KEY_CLIENT_ID_REF_NAME +{{- end }} + +{{/* +Renders the global.secretsManager.azureKeyVault.clientIdRefKey value +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientIdRefKey" }} + {{- .Values.global.secretsManager.azureKeyVault.clientIdSecretKey | default "AB_AZURE_KEY_VAULT_CLIENT_ID" }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.clientIdRefKey environment variable +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientIdRefKey.env" }} +- name: AB_AZURE_KEY_CLIENT_ID_REF_KEY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_AZURE_KEY_CLIENT_ID_REF_KEY +{{- end }} + +{{/* +Renders the global.secretsManager.azureKeyVault.clientSecretRefName value +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientSecretRefName" }} + {{- include "airbyte.secretsManager.secretName" . 
}} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.clientSecretRefName environment variable +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientSecretRefName.env" }} +- name: AB_AZURE_KEY_CLIENT_SECRET_REF_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_AZURE_KEY_CLIENT_SECRET_REF_NAME +{{- end }} + +{{/* +Renders the global.secretsManager.azureKeyVault.clientSecretRefKey value +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientSecretRefKey" }} + {{- .Values.global.secretsManager.azureKeyVault.clientSecretSecretKey | default "AB_AZURE_KEY_VAULT_CLIENT_SECRET" }} +{{- end }} + +{{/* +Renders the secretsManager.azureKeyVault.clientSecretRefKey environment variable +*/}} +{{- define "airbyte.secretsManager.azureKeyVault.clientSecretRefKey.env" }} +- name: AB_AZURE_KEY_CLIENT_SECRET_REF_KEY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AB_AZURE_KEY_CLIENT_SECRET_REF_KEY +{{- end }} + +{{/* +Renders the global.secretsManager.googleSecretManager.projectId value +*/}} +{{- define "airbyte.secretsManager.googleSecretManager.projectId" }} + {{- .Values.global.secretsManager.googleSecretManager.projectId }} +{{- end }} + +{{/* +Renders the secretsManager.googleSecretManager.projectId environment variable +*/}} +{{- define "airbyte.secretsManager.googleSecretManager.projectId.env" }} +- name: SECRET_STORE_GCP_PROJECT_ID + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SECRET_STORE_GCP_PROJECT_ID +{{- end }} + +{{/* +Renders the global.secretsManager.googleSecretManager.credentials value +*/}} +{{- define "airbyte.secretsManager.googleSecretManager.credentials" }} + {{- .Values.global.secretsManager.googleSecretManager.credentials }} +{{- end }} + +{{/* +Renders the secretsManager.googleSecretManager.credentials secret key +*/}} +{{- define "airbyte.secretsManager.googleSecretManager.credentials.secretKey" }} + {{- .Values.global.secretsManager.googleSecretManager.credentialsSecretKey | default "SECRET_STORE_GCP_CREDENTIALS" }} +{{- end }} + +{{/* +Renders the secretsManager.googleSecretManager.credentials environment variable +*/}} +{{- define "airbyte.secretsManager.googleSecretManager.credentials.env" }} +- name: SECRET_STORE_GCP_CREDENTIALS + valueFrom: + secretKeyRef: + name: {{ include "airbyte.secretsManager.secretName" . }} + key: {{ include "airbyte.secretsManager.googleSecretManager.credentials.secretKey" . 
}} +{{- end }} + +{{/* +Renders the global.secretsManager.vault.address value +*/}} +{{- define "airbyte.secretsManager.vault.address" }} + {{- .Values.global.secretsManager.vault.address | default "http://airbyte-vault-svc.ab:8200" }} +{{- end }} + +{{/* +Renders the secretsManager.vault.address environment variable +*/}} +{{- define "airbyte.secretsManager.vault.address.env" }} +- name: VAULT_ADDRESS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: VAULT_ADDRESS +{{- end }} + +{{/* +Renders the global.secretsManager.vault.prefix value +*/}} +{{- define "airbyte.secretsManager.vault.prefix" }} + {{- .Values.global.secretsManager.vault.prefix | default "secret/" }} +{{- end }} + +{{/* +Renders the secretsManager.vault.prefix environment variable +*/}} +{{- define "airbyte.secretsManager.vault.prefix.env" }} +- name: VAULT_PREFIX + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: VAULT_PREFIX +{{- end }} + +{{/* +Renders the global.secretsManager.vault.authToken value +*/}} +{{- define "airbyte.secretsManager.vault.authToken" }} + {{- .Values.global.secretsManager.vault.authToken }} +{{- end }} + +{{/* +Renders the secretsManager.vault.authToken secret key +*/}} +{{- define "airbyte.secretsManager.vault.authToken.secretKey" }} + {{- .Values.global.secretsManager.vault.authTokenSecretKey | default "VAULT_AUTH_TOKEN" }} +{{- end }} + +{{/* +Renders the secretsManager.vault.authToken environment variable +*/}} +{{- define "airbyte.secretsManager.vault.authToken.env" }} +- name: VAULT_AUTH_TOKEN + valueFrom: + secretKeyRef: + name: {{ include "airbyte.secretsManager.secretName" . }} + key: {{ include "airbyte.secretsManager.vault.authToken.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all secretsManager environment variables +*/}} +{{- define "airbyte.secretsManager.envs" }} +{{- include "airbyte.secretsManager.type.env" . }} +{{- $opt := (include "airbyte.secretsManager.type" .) }} + +{{- if eq $opt "AWS_SECRET_MANAGER" }} +{{- include "airbyte.secretsManager.awsSecretManager.accessKeyId.env" . }} +{{- include "airbyte.secretsManager.awsSecretManager.secretAccessKey.env" . }} +{{- include "airbyte.secretsManager.awsSecretManager.region.env" . }} +{{- include "airbyte.secretsManager.awsSecretManager.tags.env" . }} +{{- include "airbyte.secretsManager.awsSecretManager.kmsArn.env" . }} +{{- end }} + +{{- if eq $opt "AZURE_KEY_VAULT" }} +{{- include "airbyte.secretsManager.azureKeyVault.clientId.env" . }} +{{- include "airbyte.secretsManager.azureKeyVault.clientSecret.env" . }} +{{- include "airbyte.secretsManager.azureKeyVault.tenantId.env" . }} +{{- include "airbyte.secretsManager.azureKeyVault.vaultUrl.env" . }} +{{- include "airbyte.secretsManager.azureKeyVault.clientIdRefName.env" . }} +{{- include "airbyte.secretsManager.azureKeyVault.clientIdRefKey.env" . }} +{{- include "airbyte.secretsManager.azureKeyVault.clientSecretRefName.env" . }} +{{- include "airbyte.secretsManager.azureKeyVault.clientSecretRefKey.env" . }} +{{- end }} + +{{- if eq $opt "GOOGLE_SECRET_MANAGER" }} +{{- include "airbyte.secretsManager.googleSecretManager.projectId.env" . }} +{{- include "airbyte.secretsManager.googleSecretManager.credentials.env" . }} +{{- end }} + +{{- if eq $opt "TESTING_CONFIG_DB_TABLE" }} +{{- end }} + +{{- if eq $opt "VAULT" }} +{{- include "airbyte.secretsManager.vault.address.env" . }} +{{- include "airbyte.secretsManager.vault.prefix.env" . }} +{{- include "airbyte.secretsManager.vault.authToken.env" . 
}} +{{- end }} + +{{- end }} + +{{/* +Renders the set of all secretsManager config map variables +*/}} +{{- define "airbyte.secretsManager.configVars" }} +SECRET_PERSISTENCE: {{ include "airbyte.secretsManager.type" . | quote }} +{{- $opt := (include "airbyte.secretsManager.type" .) }} + +{{- if eq $opt "AWS_SECRET_MANAGER" }} +AWS_SECRET_MANAGER_REGION: {{ include "airbyte.secretsManager.awsSecretManager.region" . | quote }} +AWS_SECRET_MANAGER_SECRET_TAGS: {{ include "airbyte.tagsToString" .Values.global.secretsManager.awsSecretManager.tags | quote }} +AWS_KMS_KEY_ARN: {{ include "airbyte.secretsManager.awsSecretManager.kmsArn" . | quote }} +{{- end }} + +{{- if eq $opt "AZURE_KEY_VAULT" }} +AB_AZURE_KEY_VAULT_TENANT_ID: {{ include "airbyte.secretsManager.azureKeyVault.tenantId" . | quote }} +AB_AZURE_KEY_VAULT_VAULT_URL: {{ include "airbyte.secretsManager.azureKeyVault.vaultUrl" . | quote }} +AB_AZURE_KEY_CLIENT_ID_REF_NAME: {{ include "airbyte.secretsManager.secretName" . | quote }} +AB_AZURE_KEY_CLIENT_ID_REF_KEY: {{ .Values.global.secretsManager.azureKeyVault.clientIdSecretKey | default "AB_AZURE_KEY_VAULT_CLIENT_ID" | quote }} +AB_AZURE_KEY_CLIENT_SECRET_REF_NAME: {{ include "airbyte.secretsManager.secretName" . | quote }} +AB_AZURE_KEY_CLIENT_SECRET_REF_KEY: {{ .Values.global.secretsManager.azureKeyVault.clientSecretSecretKey | default "AB_AZURE_KEY_VAULT_CLIENT_SECRET" | quote }} +{{- end }} + +{{- if eq $opt "GOOGLE_SECRET_MANAGER" }} +SECRET_STORE_GCP_PROJECT_ID: {{ include "airbyte.secretsManager.googleSecretManager.projectId" . | quote }} +{{- end }} + +{{- if eq $opt "TESTING_CONFIG_DB_TABLE" }} +{{- end }} + +{{- if eq $opt "VAULT" }} +VAULT_ADDRESS: {{ include "airbyte.secretsManager.vault.address" . | quote }} +VAULT_PREFIX: {{ include "airbyte.secretsManager.vault.prefix" . | quote }} +{{- end }} + +{{- end }} + +{{/* +Renders the set of all secretsManager secret variables +*/}} +{{- define "airbyte.secretsManager.secrets" }} +{{- $opt := (include "airbyte.secretsManager.type" .) }} + +{{- if eq $opt "AWS_SECRET_MANAGER" }} +AWS_SECRET_MANAGER_ACCESS_KEY_ID: {{ include "airbyte.secretsManager.awsSecretManager.accessKeyId" . | quote }} +AWS_SECRET_MANAGER_SECRET_ACCESS_KEY: {{ include "airbyte.secretsManager.awsSecretManager.secretAccessKey" . | quote }} +{{- end }} + +{{- if eq $opt "AZURE_KEY_VAULT" }} +AB_AZURE_KEY_VAULT_CLIENT_ID: {{ include "airbyte.secretsManager.azureKeyVault.clientId" . | quote }} +AB_AZURE_KEY_VAULT_CLIENT_SECRET: {{ include "airbyte.secretsManager.azureKeyVault.clientSecret" . | quote }} +{{- end }} + +{{- if eq $opt "GOOGLE_SECRET_MANAGER" }} +SECRET_STORE_GCP_CREDENTIALS: {{ include "airbyte.secretsManager.googleSecretManager.credentials" . | quote }} +{{- end }} + +{{- if eq $opt "TESTING_CONFIG_DB_TABLE" }} +{{- end }} + +{{- if eq $opt "VAULT" }} +VAULT_AUTH_TOKEN: {{ include "airbyte.secretsManager.vault.authToken" . | quote }} +{{- end }} + +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_storage.tpl b/charts/v2/airbyte/templates/config/_storage.tpl new file mode 100644 index 00000000000..f4fac0933cc --- /dev/null +++ b/charts/v2/airbyte/templates/config/_storage.tpl @@ -0,0 +1,452 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
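For orientation, the storage helpers that follow read keys under global.storage. A values.yaml sketch that simply mirrors the defaults visible in these templates (MinIO storage, airbyte-storage buckets, the bundled MinIO endpoint) might look like this:

  global:
    storage:
      type: minio                                # the branches below also handle s3, gcs, and azure
      secretName: ""                             # empty falls back to <release>-airbyte-secrets
      bucket:
        log: airbyte-storage                     # default in airbyte.storage.bucket.log
        state: airbyte-storage
        activityPayload: airbyte-storage
        workloadOutput: airbyte-storage
      minio:
        endpoint: "http://airbyte-minio-svc:9000"  # default in airbyte.storage.minio.endpoint
        s3PathStyleAccess: true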
*/}} + +{{/* + Storage Configuration +*/}} + +{{/* +Renders the storage secret name +*/}} +{{- define "airbyte.storage.secretName" }} +{{- if .Values.global.storage.secretName }} + {{- .Values.global.storage.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.storage.type value +*/}} +{{- define "airbyte.storage.type" }} + {{- .Values.global.storage.type | default "minio" }} +{{- end }} + +{{/* +Renders the storage.type environment variable +*/}} +{{- define "airbyte.storage.type.env" }} +- name: STORAGE_TYPE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: STORAGE_TYPE +{{- end }} + +{{/* +Renders the global.storage.bucket.activityPayload value +*/}} +{{- define "airbyte.storage.bucket.activityPayload" }} + {{- .Values.global.storage.bucket.activityPayload | default "airbyte-storage" }} +{{- end }} + +{{/* +Renders the storage.bucket.activityPayload environment variable +*/}} +{{- define "airbyte.storage.bucket.activityPayload.env" }} +- name: STORAGE_BUCKET_ACTIVITY_PAYLOAD + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: STORAGE_BUCKET_ACTIVITY_PAYLOAD +{{- end }} + +{{/* +Renders the global.storage.bucket.log value +*/}} +{{- define "airbyte.storage.bucket.log" }} + {{- .Values.global.storage.bucket.log | default "airbyte-storage" }} +{{- end }} + +{{/* +Renders the storage.bucket.log environment variable +*/}} +{{- define "airbyte.storage.bucket.log.env" }} +- name: STORAGE_BUCKET_LOG + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: STORAGE_BUCKET_LOG +{{- end }} + +{{/* +Renders the global.storage.bucket.state value +*/}} +{{- define "airbyte.storage.bucket.state" }} + {{- .Values.global.storage.bucket.state | default "airbyte-storage" }} +{{- end }} + +{{/* +Renders the storage.bucket.state environment variable +*/}} +{{- define "airbyte.storage.bucket.state.env" }} +- name: STORAGE_BUCKET_STATE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: STORAGE_BUCKET_STATE +{{- end }} + +{{/* +Renders the global.storage.bucket.workloadOutput value +*/}} +{{- define "airbyte.storage.bucket.workloadOutput" }} + {{- .Values.global.storage.bucket.workloadOutput | default "airbyte-storage" }} +{{- end }} + +{{/* +Renders the storage.bucket.workloadOutput environment variable +*/}} +{{- define "airbyte.storage.bucket.workloadOutput.env" }} +- name: STORAGE_BUCKET_WORKLOAD_OUTPUT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: STORAGE_BUCKET_WORKLOAD_OUTPUT +{{- end }} + +{{/* +Renders the global.storage.s3.region value +*/}} +{{- define "airbyte.storage.s3.region" }} + {{- .Values.global.storage.s3.region }} +{{- end }} + +{{/* +Renders the storage.s3.region environment variable +*/}} +{{- define "airbyte.storage.s3.region.env" }} +- name: AWS_DEFAULT_REGION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AWS_DEFAULT_REGION +{{- end }} + +{{/* +Renders the global.storage.s3.authenticationType value +*/}} +{{- define "airbyte.storage.s3.authenticationType" }} + {{- .Values.global.storage.s3.authenticationType | default "credentials" }} +{{- end }} + +{{/* +Renders the storage.s3.authenticationType environment variable +*/}} +{{- define "airbyte.storage.s3.authenticationType.env" }} +- name: AWS_AUTHENTICATION_TYPE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AWS_AUTHENTICATION_TYPE +{{- end }} + +{{/* +Renders the 
global.storage.s3.accessKeyId value +*/}} +{{- define "airbyte.storage.s3.accessKeyId" }} + {{- .Values.global.storage.s3.accessKeyId }} +{{- end }} + +{{/* +Renders the storage.s3.accessKeyId secret key +*/}} +{{- define "airbyte.storage.s3.accessKeyId.secretKey" }} + {{- .Values.global.storage.s3.accessKeyIdSecretKey | default "AWS_ACCESS_KEY_ID" }} +{{- end }} + +{{/* +Renders the storage.s3.accessKeyId environment variable +*/}} +{{- define "airbyte.storage.s3.accessKeyId.env" }} +- name: AWS_ACCESS_KEY_ID + valueFrom: + secretKeyRef: + name: {{ include "airbyte.storage.secretName" . }} + key: {{ include "airbyte.storage.s3.accessKeyId.secretKey" . }} +{{- end }} + +{{/* +Renders the global.storage.s3.secretAccesskey value +*/}} +{{- define "airbyte.storage.s3.secretAccesskey" }} + {{- .Values.global.storage.s3.secretAccesskey }} +{{- end }} + +{{/* +Renders the storage.s3.secretAccesskey secret key +*/}} +{{- define "airbyte.storage.s3.secretAccesskey.secretKey" }} + {{- .Values.global.storage.s3.secretAccesskeySecretKey | default "AWS_SECRET_ACCESS_KEY" }} +{{- end }} + +{{/* +Renders the storage.s3.secretAccesskey environment variable +*/}} +{{- define "airbyte.storage.s3.secretAccesskey.env" }} +- name: AWS_SECRET_ACCESS_KEY + valueFrom: + secretKeyRef: + name: {{ include "airbyte.storage.secretName" . }} + key: {{ include "airbyte.storage.s3.secretAccesskey.secretKey" . }} +{{- end }} + +{{/* +Renders the global.storage.azure.connectionString value +*/}} +{{- define "airbyte.storage.azure.connectionString" }} + {{- .Values.global.storage.azure.connectionString }} +{{- end }} + +{{/* +Renders the storage.azure.connectionString secret key +*/}} +{{- define "airbyte.storage.azure.connectionString.secretKey" }} + {{- .Values.global.storage.azure.connectionStringSecretKey | default "AZURE_STORAGE_CONNECTION_STRING" }} +{{- end }} + +{{/* +Renders the storage.azure.connectionString environment variable +*/}} +{{- define "airbyte.storage.azure.connectionString.env" }} +- name: AZURE_STORAGE_CONNECTION_STRING + valueFrom: + secretKeyRef: + name: {{ include "airbyte.storage.secretName" . }} + key: {{ include "airbyte.storage.azure.connectionString.secretKey" . }} +{{- end }} + +{{/* +Renders the global.storage.gcs.credentialsJson value +*/}} +{{- define "airbyte.storage.gcs.credentialsJson" }} + {{- .Values.global.storage.gcs.credentialsJson }} +{{- end }} + +{{/* +Renders the storage.gcs.credentialsJson secret key +*/}} +{{- define "airbyte.storage.gcs.credentialsJson.secretKey" }} + {{- .Values.global.storage.gcs.credentialsJsonSecretKey | default "GOOGLE_APPLICATION_CREDENTIALS_JSON" }} +{{- end }} + +{{/* +Renders the storage.gcs.credentialsJson environment variable +*/}} +{{- define "airbyte.storage.gcs.credentialsJson.env" }} +- name: GOOGLE_APPLICATION_CREDENTIALS_JSON + valueFrom: + secretKeyRef: + name: {{ include "airbyte.storage.secretName" . }} + key: {{ include "airbyte.storage.gcs.credentialsJson.secretKey" . 
}} +{{- end }} + +{{/* +Renders the global.storage.gcs.credentialsJsonPath value +*/}} +{{- define "airbyte.storage.gcs.credentialsJsonPath" }} + {{- .Values.global.storage.gcs.credentialsJsonPath }} +{{- end }} + +{{/* +Renders the storage.gcs.credentialsJsonPath environment variable +*/}} +{{- define "airbyte.storage.gcs.credentialsJsonPath.env" }} +- name: GOOGLE_APPLICATION_CREDENTIALS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: GOOGLE_APPLICATION_CREDENTIALS +{{- end }} + +{{/* +Renders the storage.containerOrchestrator.secretMountPath environment variable +*/}} +{{- define "airbyte.storage.containerOrchestrator.secretMountPath.env" }} +- name: CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH +{{- end }} + +{{/* +Renders the global.storage.minio.accessKeyId value +*/}} +{{- define "airbyte.storage.minio.accessKeyId" }} + {{- .Values.global.storage.minio.accessKeyId | default "minio" }} +{{- end }} + +{{/* +Renders the storage.minio.accessKeyId secret key +*/}} +{{- define "airbyte.storage.minio.accessKeyId.secretKey" }} + {{- .Values.global.storage.minio.accessKeyIdSecretKey | default "AWS_ACCESS_KEY_ID" }} +{{- end }} + +{{/* +Renders the storage.minio.accessKeyId environment variable +*/}} +{{- define "airbyte.storage.minio.accessKeyId.env" }} +- name: AWS_ACCESS_KEY_ID + valueFrom: + secretKeyRef: + name: {{ include "airbyte.storage.secretName" . }} + key: {{ include "airbyte.storage.minio.accessKeyId.secretKey" . }} +{{- end }} + +{{/* +Renders the global.storage.minio.secretAccessKey value +*/}} +{{- define "airbyte.storage.minio.secretAccessKey" }} + {{- .Values.global.storage.minio.secretAccessKey | default "minio123" }} +{{- end }} + +{{/* +Renders the storage.minio.secretAccessKey secret key +*/}} +{{- define "airbyte.storage.minio.secretAccessKey.secretKey" }} + {{- .Values.global.storage.minio.secretAccessKeySecretKey | default "AWS_SECRET_ACCESS_KEY" }} +{{- end }} + +{{/* +Renders the storage.minio.secretAccessKey environment variable +*/}} +{{- define "airbyte.storage.minio.secretAccessKey.env" }} +- name: AWS_SECRET_ACCESS_KEY + valueFrom: + secretKeyRef: + name: {{ include "airbyte.storage.secretName" . }} + key: {{ include "airbyte.storage.minio.secretAccessKey.secretKey" . }} +{{- end }} + +{{/* +Renders the global.storage.minio.endpoint value +*/}} +{{- define "airbyte.storage.minio.endpoint" }} + {{- .Values.global.storage.minio.endpoint | default "http://airbyte-minio-svc:9000" }} +{{- end }} + +{{/* +Renders the storage.minio.endpoint environment variable +*/}} +{{- define "airbyte.storage.minio.endpoint.env" }} +- name: MINIO_ENDPOINT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: MINIO_ENDPOINT +{{- end }} + +{{/* +Renders the global.storage.minio.s3PathStyleAccess value +*/}} +{{- define "airbyte.storage.minio.s3PathStyleAccess" }} + {{- .Values.global.storage.minio.s3PathStyleAccess | default true }} +{{- end }} + +{{/* +Renders the storage.minio.s3PathStyleAccess environment variable +*/}} +{{- define "airbyte.storage.minio.s3PathStyleAccess.env" }} +- name: S3_PATH_STYLE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: S3_PATH_STYLE +{{- end }} + +{{/* +Renders the set of all storage environment variables +*/}} +{{- define "airbyte.storage.envs" }} +{{- include "airbyte.storage.type.env" . }} +{{- include "airbyte.storage.bucket.activityPayload.env" . 
}} +{{- include "airbyte.storage.bucket.log.env" . }} +{{- include "airbyte.storage.bucket.state.env" . }} +{{- include "airbyte.storage.bucket.workloadOutput.env" . }} +{{- $opt := (include "airbyte.storage.type" .) }} + +{{- if eq $opt "azure" }} +{{- include "airbyte.storage.azure.connectionString.env" . }} +{{- end }} + +{{- if eq $opt "gcs" }} +{{- include "airbyte.storage.gcs.credentialsJson.env" . }} +{{- include "airbyte.storage.gcs.credentialsJsonPath.env" . }} +{{- include "airbyte.storage.containerOrchestrator.secretMountPath.env" . }} +{{- end }} + +{{- if eq $opt "minio" }} +{{- include "airbyte.storage.minio.accessKeyId.env" . }} +{{- include "airbyte.storage.minio.secretAccessKey.env" . }} +{{- include "airbyte.storage.minio.endpoint.env" . }} +{{- include "airbyte.storage.minio.s3PathStyleAccess.env" . }} +{{- end }} + +{{- if eq $opt "s3" }} +{{- include "airbyte.storage.s3.region.env" . }} +{{- include "airbyte.storage.s3.authenticationType.env" . }} +{{- include "airbyte.storage.s3.accessKeyId.env" . }} +{{- include "airbyte.storage.s3.secretAccesskey.env" . }} +{{- end }} + +{{- end }} + +{{/* +Renders the set of all storage config map variables +*/}} +{{- define "airbyte.storage.configVars" }} +STORAGE_TYPE: {{ include "airbyte.storage.type" . | quote }} +STORAGE_BUCKET_ACTIVITY_PAYLOAD: {{ include "airbyte.storage.bucket.activityPayload" . | quote }} +STORAGE_BUCKET_LOG: {{ include "airbyte.storage.bucket.log" . | quote }} +STORAGE_BUCKET_STATE: {{ include "airbyte.storage.bucket.state" . | quote }} +STORAGE_BUCKET_WORKLOAD_OUTPUT: {{ include "airbyte.storage.bucket.workloadOutput" . | quote }} +{{- $opt := (include "airbyte.storage.type" .) }} + +{{- if eq $opt "azure" }} +{{- end }} + +{{- if eq $opt "gcs" }} +GOOGLE_APPLICATION_CREDENTIALS: {{ include "airbyte.storage.gcs.credentialsJsonPath" . | quote }} +CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH: {{ "/secrets/gcs-log-creds" | quote }} +{{- end }} + +{{- if eq $opt "minio" }} +MINIO_ENDPOINT: {{ include "airbyte.storage.minio.endpoint" . | quote }} +S3_PATH_STYLE: {{ include "airbyte.storage.minio.s3PathStyleAccess" . | quote }} +{{- end }} + +{{- if eq $opt "s3" }} +AWS_DEFAULT_REGION: {{ include "airbyte.storage.s3.region" . | quote }} +AWS_AUTHENTICATION_TYPE: {{ include "airbyte.storage.s3.authenticationType" . | quote }} +{{- end }} + +{{- end }} + +{{/* +Renders the set of all storage secret variables +*/}} +{{- define "airbyte.storage.secrets" }} +{{- $opt := (include "airbyte.storage.type" .) }} + +{{- if eq $opt "azure" }} +AZURE_STORAGE_CONNECTION_STRING: {{ include "airbyte.storage.azure.connectionString" . | quote }} +{{- end }} + +{{- if eq $opt "gcs" }} +GOOGLE_APPLICATION_CREDENTIALS_JSON: {{ include "airbyte.storage.gcs.credentialsJson" . | quote }} +{{- end }} + +{{- if eq $opt "minio" }} +AWS_ACCESS_KEY_ID: {{ include "airbyte.storage.minio.accessKeyId" . | quote }} +AWS_SECRET_ACCESS_KEY: {{ include "airbyte.storage.minio.secretAccessKey" . | quote }} +{{- end }} + +{{- if eq $opt "s3" }} +AWS_ACCESS_KEY_ID: {{ include "airbyte.storage.s3.accessKeyId" . | quote }} +AWS_SECRET_ACCESS_KEY: {{ include "airbyte.storage.s3.secretAccesskey" . | quote }} +{{- end }} + +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_temporal.tpl b/charts/v2/airbyte/templates/config/_temporal.tpl new file mode 100644 index 00000000000..7237a63c489 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_temporal.tpl @@ -0,0 +1,603 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
*/}} + +{{/* + Temporal Configuration +*/}} + +{{/* +Renders the temporal.autoSetup value +*/}} +{{- define "airbyte.temporal.autoSetup" }} + {{- .Values.temporal.autoSetup | default true }} +{{- end }} + +{{/* +Renders the temporal.autoSetup environment variable +*/}} +{{- define "airbyte.temporal.autoSetup.env" }} +- name: AUTO_SETUP + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: AUTO_SETUP +{{- end }} + +{{/* +Renders the temporal.database.engine value +*/}} +{{- define "airbyte.temporal.database.engine" }} + {{- .Values.temporal.database.engine | default "postgresql" }} +{{- end }} + +{{/* +Renders the temporal.database.engine environment variable +*/}} +{{- define "airbyte.temporal.database.engine.env" }} +- name: DB + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DB +{{- end }} + +{{/* +Renders the temporal.database.host environment variable +*/}} +{{- define "airbyte.temporal.database.host.env" }} +- name: POSTGRES_SEEDS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATABASE_HOST +{{- end }} + +{{/* +Renders the temporal.database.port environment variable +*/}} +{{- define "airbyte.temporal.database.port.env" }} +- name: DB_PORT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATABASE_PORT +{{- end }} + +{{/* +Renders the temporal.database.user secret key +*/}} +{{- define "airbyte.temporal.database.user.secretKey" }} + {{- .Values.temporal.database.userSecretKey | default "DATABASE_USER" }} +{{- end }} + +{{/* +Renders the temporal.database.user environment variable +*/}} +{{- define "airbyte.temporal.database.user.env" }} +- name: POSTGRES_USER + valueFrom: + secretKeyRef: + name: {{ include "airbyte.database.secretName" . }} + key: {{ include "airbyte.temporal.database.user.secretKey" . }} +{{- end }} + +{{/* +Renders the temporal.database.password secret key +*/}} +{{- define "airbyte.temporal.database.password.secretKey" }} + {{- .Values.temporal.database.passwordSecretKey | default "DATABASE_PASSWORD" }} +{{- end }} + +{{/* +Renders the temporal.database.password environment variable +*/}} +{{- define "airbyte.temporal.database.password.env" }} +- name: POSTGRES_PWD + valueFrom: + secretKeyRef: + name: {{ include "airbyte.database.secretName" . }} + key: {{ include "airbyte.temporal.database.password.secretKey" . 
}} +{{- end }} + +{{/* +Renders the temporal.database.tlsEnabled value +*/}} +{{- define "airbyte.temporal.database.tlsEnabled" }} + {{- ternary "true" "false" (eq .Values.global.database.type "external") }} +{{- end }} + +{{/* +Renders the temporal.database.tlsEnabled environment variable +*/}} +{{- define "airbyte.temporal.database.tlsEnabled.env" }} +- name: POSTGRES_TLS_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: POSTGRES_TLS_ENABLED +{{- end }} + +{{/* +Renders the temporal.database.tlsDisableHostVerification value +*/}} +{{- define "airbyte.temporal.database.tlsDisableHostVerification" }} + {{- ternary "true" "false" (eq .Values.global.database.type "external") }} +{{- end }} + +{{/* +Renders the temporal.database.tlsDisableHostVerification environment variable +*/}} +{{- define "airbyte.temporal.database.tlsDisableHostVerification.env" }} +- name: POSTGRES_TLS_DISABLE_HOST_VERIFICATION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: POSTGRES_TLS_DISABLE_HOST_VERIFICATION +{{- end }} + +{{/* +Renders the temporal.database.sqlTlsEnabled value +*/}} +{{- define "airbyte.temporal.database.sqlTlsEnabled" }} + {{- ternary "true" "false" (eq .Values.global.database.type "external") }} +{{- end }} + +{{/* +Renders the temporal.database.sqlTlsEnabled environment variable +*/}} +{{- define "airbyte.temporal.database.sqlTlsEnabled.env" }} +- name: SQL_TLS_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SQL_TLS_ENABLED +{{- end }} + +{{/* +Renders the temporal.database.sqlTlsDisableHostVerification value +*/}} +{{- define "airbyte.temporal.database.sqlTlsDisableHostVerification" }} + {{- ternary "true" "false" (eq .Values.global.database.type "external") }} +{{- end }} + +{{/* +Renders the temporal.database.sqlTlsDisableHostVerification environment variable +*/}} +{{- define "airbyte.temporal.database.sqlTlsDisableHostVerification.env" }} +- name: SQL_TLS_DISABLE_HOST_VERIFICATION + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SQL_TLS_DISABLE_HOST_VERIFICATION +{{- end }} + +{{/* +Renders the temporal.host value +*/}} +{{- define "airbyte.temporal.host" }} + {{- (printf "%s-temporal:%d" .Release.Name (int .Values.temporal.service.port)) }} +{{- end }} + +{{/* +Renders the temporal.host environment variable +*/}} +{{- define "airbyte.temporal.host.env" }} +- name: TEMPORAL_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_HOST +{{- end }} + +{{/* +Renders the temporal.configFilePath value +*/}} +{{- define "airbyte.temporal.configFilePath" }} + {{- .Values.temporal.configFilePath | default "config/dynamicconfig/development.yaml" }} +{{- end }} + +{{/* +Renders the temporal.configFilePath environment variable +*/}} +{{- define "airbyte.temporal.configFilePath.env" }} +- name: DYNAMIC_CONFIG_FILE_PATH + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DYNAMIC_CONFIG_FILE_PATH +{{- end }} + +{{/* +Renders the set of all temporal environment variables +*/}} +{{- define "airbyte.temporal.envs" }} +{{- include "airbyte.temporal.autoSetup.env" . }} +{{- include "airbyte.temporal.database.engine.env" . }} +{{- include "airbyte.temporal.database.host.env" . }} +{{- include "airbyte.temporal.database.port.env" . }} +{{- include "airbyte.temporal.database.user.env" . }} +{{- include "airbyte.temporal.database.password.env" . }} +{{- include "airbyte.temporal.database.tlsEnabled.env" . 
}} +{{- include "airbyte.temporal.database.tlsDisableHostVerification.env" . }} +{{- include "airbyte.temporal.database.sqlTlsEnabled.env" . }} +{{- include "airbyte.temporal.database.sqlTlsDisableHostVerification.env" . }} +{{- include "airbyte.temporal.host.env" . }} +{{- include "airbyte.temporal.configFilePath.env" . }} +{{- end }} + +{{/* +Renders the set of all temporal config map variables +*/}} +{{- define "airbyte.temporal.configVars" }} +AUTO_SETUP: {{ include "airbyte.temporal.autoSetup" . | quote }} +DB: {{ include "airbyte.temporal.database.engine" . | quote }} +POSTGRES_TLS_ENABLED: {{ ternary "true" "false" (eq .Values.global.database.type "external") | quote }} +POSTGRES_TLS_DISABLE_HOST_VERIFICATION: {{ ternary "true" "false" (eq .Values.global.database.type "external") | quote }} +SQL_TLS_ENABLED: {{ ternary "true" "false" (eq .Values.global.database.type "external") | quote }} +SQL_TLS_DISABLE_HOST_VERIFICATION: {{ ternary "true" "false" (eq .Values.global.database.type "external") | quote }} +TEMPORAL_HOST: {{ (printf "%s-temporal:%d" .Release.Name (int .Values.temporal.service.port)) | quote }} +DYNAMIC_CONFIG_FILE_PATH: {{ include "airbyte.temporal.configFilePath" . | quote }} +{{- end }} + +{{/* +Renders the temporal.cli secret name +*/}} +{{- define "airbyte.temporal.cli.secretName" }} +{{- if .Values.global.temporal.cli.secretName }} + {{- .Values.global.temporal.cli.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.temporal.cli.address value +*/}} +{{- define "airbyte.temporal.cli.address" }} + {{- .Values.global.temporal.cli.address }} +{{- end }} + +{{/* +Renders the temporal.cli.address environment variable +*/}} +{{- define "airbyte.temporal.cli.address.env" }} +- name: TEMPORAL_CLI_ADDRESS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_CLI_ADDRESS +{{- end }} + +{{/* +Renders the global.temporal.cli.namespace value +*/}} +{{- define "airbyte.temporal.cli.namespace" }} + {{- .Values.global.temporal.cli.namespace }} +{{- end }} + +{{/* +Renders the temporal.cli.namespace environment variable +*/}} +{{- define "airbyte.temporal.cli.namespace.env" }} +- name: TEMPORAL_CLI_NAMESPACE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_CLI_NAMESPACE +{{- end }} + +{{/* +Renders the global.temporal.cli.tlsCert value +*/}} +{{- define "airbyte.temporal.cli.tlsCert" }} + {{- .Values.global.temporal.cli.tlsCert }} +{{- end }} + +{{/* +Renders the temporal.cli.tlsCert secret key +*/}} +{{- define "airbyte.temporal.cli.tlsCert.secretKey" }} + {{- .Values.global.temporal.cli.tlsCertSecretKey | default "TEMPORAL_CLI_TLS_CERT" }} +{{- end }} + +{{/* +Renders the temporal.cli.tlsCert environment variable +*/}} +{{- define "airbyte.temporal.cli.tlsCert.env" }} +- name: TEMPORAL_CLI_TLS_CERT + valueFrom: + secretKeyRef: + name: {{ include "airbyte.temporal.cli.secretName" . }} + key: {{ include "airbyte.temporal.cli.tlsCert.secretKey" . 
}} +{{- end }} + +{{/* +Renders the global.temporal.cli.tlsKey value +*/}} +{{- define "airbyte.temporal.cli.tlsKey" }} + {{- .Values.global.temporal.cli.tlsKey }} +{{- end }} + +{{/* +Renders the temporal.cli.tlsKey secret key +*/}} +{{- define "airbyte.temporal.cli.tlsKey.secretKey" }} + {{- .Values.global.temporal.cli.tlsKeySecretKey | default "TEMPORAL_CLI_TLS_KEY" }} +{{- end }} + +{{/* +Renders the temporal.cli.tlsKey environment variable +*/}} +{{- define "airbyte.temporal.cli.tlsKey.env" }} +- name: TEMPORAL_CLI_TLS_KEY + valueFrom: + secretKeyRef: + name: {{ include "airbyte.temporal.cli.secretName" . }} + key: {{ include "airbyte.temporal.cli.tlsKey.secretKey" . }} +{{- end }} + +{{/* +Renders the set of all temporal.cli environment variables +*/}} +{{- define "airbyte.temporal.cli.envs" }} +{{- include "airbyte.temporal.cli.address.env" . }} +{{- include "airbyte.temporal.cli.namespace.env" . }} +{{- include "airbyte.temporal.cli.tlsCert.env" . }} +{{- include "airbyte.temporal.cli.tlsKey.env" . }} +{{- end }} + +{{/* +Renders the set of all temporal.cli config map variables +*/}} +{{- define "airbyte.temporal.cli.configVars" }} +TEMPORAL_CLI_ADDRESS: {{ include "airbyte.temporal.cli.address" . | quote }} +TEMPORAL_CLI_NAMESPACE: {{ include "airbyte.temporal.cli.namespace" . | quote }} +{{- end }} + +{{/* +Renders the set of all temporal.cli secret variables +*/}} +{{- define "airbyte.temporal.cli.secrets" }} +TEMPORAL_CLI_TLS_CERT: {{ include "airbyte.temporal.cli.tlsCert" . | quote }} +TEMPORAL_CLI_TLS_KEY: {{ include "airbyte.temporal.cli.tlsKey" . | quote }} +{{- end }} + +{{/* +Renders the temporal.cloud secret name +*/}} +{{- define "airbyte.temporal.cloud.secretName" }} +{{- if .Values.global.temporal.cloud.secretName }} + {{- .Values.global.temporal.cloud.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.temporal.cloud.enabled value +*/}} +{{- define "airbyte.temporal.cloud.enabled" }} + {{- .Values.global.temporal.cloud.enabled | default false }} +{{- end }} + +{{/* +Renders the temporal.cloud.enabled environment variable +*/}} +{{- define "airbyte.temporal.cloud.enabled.env" }} +- name: TEMPORAL_CLOUD_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_CLOUD_ENABLED +{{- end }} + +{{/* +Renders the global.temporal.cloud.clientCert value +*/}} +{{- define "airbyte.temporal.cloud.clientCert" }} + {{- .Values.global.temporal.cloud.clientCert }} +{{- end }} + +{{/* +Renders the temporal.cloud.clientCert secret key +*/}} +{{- define "airbyte.temporal.cloud.clientCert.secretKey" }} + {{- .Values.global.temporal.cloud.clientCertSecretKey | default "TEMPORAL_CLOUD_CLIENT_CERT" }} +{{- end }} + +{{/* +Renders the temporal.cloud.clientCert environment variable +*/}} +{{- define "airbyte.temporal.cloud.clientCert.env" }} +- name: TEMPORAL_CLOUD_CLIENT_CERT + valueFrom: + secretKeyRef: + name: {{ include "airbyte.temporal.cloud.secretName" . }} + key: {{ include "airbyte.temporal.cloud.clientCert.secretKey" . 
}} +{{- end }} + +{{/* +Renders the global.temporal.cloud.clientKey value +*/}} +{{- define "airbyte.temporal.cloud.clientKey" }} + {{- .Values.global.temporal.cloud.clientKey }} +{{- end }} + +{{/* +Renders the temporal.cloud.clientKey secret key +*/}} +{{- define "airbyte.temporal.cloud.clientKey.secretKey" }} + {{- .Values.global.temporal.cloud.clientKeySecretKey | default "TEMPORAL_CLOUD_CLIENT_KEY" }} +{{- end }} + +{{/* +Renders the temporal.cloud.clientKey environment variable +*/}} +{{- define "airbyte.temporal.cloud.clientKey.env" }} +- name: TEMPORAL_CLOUD_CLIENT_KEY + valueFrom: + secretKeyRef: + name: {{ include "airbyte.temporal.cloud.secretName" . }} + key: {{ include "airbyte.temporal.cloud.clientKey.secretKey" . }} +{{- end }} + +{{/* +Renders the global.temporal.cloud.namespace value +*/}} +{{- define "airbyte.temporal.cloud.namespace" }} + {{- .Values.global.temporal.cloud.namespace }} +{{- end }} + +{{/* +Renders the temporal.cloud.namespace environment variable +*/}} +{{- define "airbyte.temporal.cloud.namespace.env" }} +- name: TEMPORAL_CLOUD_NAMESPACE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_CLOUD_NAMESPACE +{{- end }} + +{{/* +Renders the global.temporal.cloud.host value +*/}} +{{- define "airbyte.temporal.cloud.host" }} + {{- .Values.global.temporal.cloud.host }} +{{- end }} + +{{/* +Renders the temporal.cloud.host environment variable +*/}} +{{- define "airbyte.temporal.cloud.host.env" }} +- name: TEMPORAL_CLOUD_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_CLOUD_HOST +{{- end }} + +{{/* +Renders the set of all temporal.cloud environment variables +*/}} +{{- define "airbyte.temporal.cloud.envs" }} +{{- include "airbyte.temporal.cloud.enabled.env" . }} +{{- include "airbyte.temporal.cloud.clientCert.env" . }} +{{- include "airbyte.temporal.cloud.clientKey.env" . }} +{{- include "airbyte.temporal.cloud.namespace.env" . }} +{{- include "airbyte.temporal.cloud.host.env" . }} +{{- end }} + +{{/* +Renders the set of all temporal.cloud config map variables +*/}} +{{- define "airbyte.temporal.cloud.configVars" }} +TEMPORAL_CLOUD_ENABLED: {{ include "airbyte.temporal.cloud.enabled" . | quote }} +TEMPORAL_CLOUD_NAMESPACE: {{ include "airbyte.temporal.cloud.namespace" . | quote }} +TEMPORAL_CLOUD_HOST: {{ include "airbyte.temporal.cloud.host" . | quote }} +{{- end }} + +{{/* +Renders the set of all temporal.cloud secret variables +*/}} +{{- define "airbyte.temporal.cloud.secrets" }} +TEMPORAL_CLOUD_CLIENT_CERT: {{ include "airbyte.temporal.cloud.clientCert" . | quote }} +TEMPORAL_CLOUD_CLIENT_KEY: {{ include "airbyte.temporal.cloud.clientKey" . 
| quote }} +{{- end }} + +{{/* +Renders the temporal.sdk.rpc.timeout value +*/}} +{{- define "airbyte.temporal.sdk.rpc.timeout" }} + {{- .Values.temporal.sdk.rpc.timeout | default "120s" }} +{{- end }} + +{{/* +Renders the temporal.sdk.rpc.timeout environment variable +*/}} +{{- define "airbyte.temporal.sdk.rpc.timeout.env" }} +- name: TEMPORAL_SDK_RPC_TIMEOUT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_SDK_RPC_TIMEOUT +{{- end }} + +{{/* +Renders the temporal.sdk.rpc.longPollTimeout value +*/}} +{{- define "airbyte.temporal.sdk.rpc.longPollTimeout" }} + {{- .Values.temporal.sdk.rpc.longPollTimeout | default "140s" }} +{{- end }} + +{{/* +Renders the temporal.sdk.rpc.longPollTimeout environment variable +*/}} +{{- define "airbyte.temporal.sdk.rpc.longPollTimeout.env" }} +- name: TEMPORAL_SDK_RPC_LONG_POLL_TIMEOUT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_SDK_RPC_LONG_POLL_TIMEOUT +{{- end }} + +{{/* +Renders the temporal.sdk.rpc.queryTimeout value +*/}} +{{- define "airbyte.temporal.sdk.rpc.queryTimeout" }} + {{- .Values.temporal.sdk.rpc.queryTimeout | default "20s" }} +{{- end }} + +{{/* +Renders the temporal.sdk.rpc.queryTimeout environment variable +*/}} +{{- define "airbyte.temporal.sdk.rpc.queryTimeout.env" }} +- name: TEMPORAL_SDK_RPC_QUERY_TIMEOUT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_SDK_RPC_QUERY_TIMEOUT +{{- end }} + +{{/* +Renders the set of all temporal.sdk environment variables +*/}} +{{- define "airbyte.temporal.sdk.envs" }} +{{- include "airbyte.temporal.sdk.rpc.timeout.env" . }} +{{- include "airbyte.temporal.sdk.rpc.longPollTimeout.env" . }} +{{- include "airbyte.temporal.sdk.rpc.queryTimeout.env" . }} +{{- end }} + +{{/* +Renders the set of all temporal.sdk config map variables +*/}} +{{- define "airbyte.temporal.sdk.configVars" }} +TEMPORAL_SDK_RPC_TIMEOUT: {{ include "airbyte.temporal.sdk.rpc.timeout" . | quote }} +TEMPORAL_SDK_RPC_LONG_POLL_TIMEOUT: {{ include "airbyte.temporal.sdk.rpc.longPollTimeout" . | quote }} +TEMPORAL_SDK_RPC_QUERY_TIMEOUT: {{ include "airbyte.temporal.sdk.rpc.queryTimeout" . | quote }} +{{- end }} + +{{/* +Renders the temporal.worker.ports environment variable +*/}} +{{- define "airbyte.temporal.worker.ports.env" }} +- name: TEMPORAL_WORKER_PORTS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TEMPORAL_WORKER_PORTS +{{- end }} + +{{/* +Renders the set of all temporal.worker environment variables +*/}} +{{- define "airbyte.temporal.worker.envs" }} +{{- include "airbyte.temporal.worker.ports.env" . }} +{{- end }} + +{{/* +Renders the set of all temporal.worker config map variables +*/}} +{{- define "airbyte.temporal.worker.configVars" }} +TEMPORAL_WORKER_PORTS: {{ "9001,9002,9003,9004,9005,9006,9007,9008,9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,9022,9023,9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039,9040" | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_topology.tpl b/charts/v2/airbyte/templates/config/_topology.tpl new file mode 100644 index 00000000000..5562a5cc903 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_topology.tpl @@ -0,0 +1,109 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
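The topology helpers that follow only map node-pool selector settings to environment variables. A values.yaml sketch restating the defaults visible in these templates:

  global:
    topology:
      nodeSelectorLabel: airbyte/node-pool   # default label key
      nodeSelectors:
        mainNodePool: main                   # defaults for the three pools
        jobsNodePool: jobs
        quickJobsNodePool: quick-jobs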
*/}} + +{{/* + Topology Configuration +*/}} + +{{/* +Renders the topology secret name +*/}} +{{- define "airbyte.topology.secretName" }} +{{- if .Values.global.topology.secretName }} + {{- .Values.global.topology.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.topology.nodeSelectorLabel value +*/}} +{{- define "airbyte.topology.nodeSelectorLabel" }} + {{- .Values.global.topology.nodeSelectorLabel | default "airbyte/node-pool" }} +{{- end }} + +{{/* +Renders the topology.nodeSelectorLabel environment variable +*/}} +{{- define "airbyte.topology.nodeSelectorLabel.env" }} +- name: NODE_SELECTOR_LABEL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: NODE_SELECTOR_LABEL +{{- end }} + +{{/* +Renders the global.topology.nodeSelectors.mainNodePool value +*/}} +{{- define "airbyte.topology.nodeSelectors.mainNodePool" }} + {{- .Values.global.topology.nodeSelectors.mainNodePool | default "main" }} +{{- end }} + +{{/* +Renders the topology.nodeSelectors.mainNodePool environment variable +*/}} +{{- define "airbyte.topology.nodeSelectors.mainNodePool.env" }} +- name: NODE_SELECTOR_LABEL_MAIN_NODE_POOL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: NODE_SELECTOR_LABEL_MAIN_NODE_POOL +{{- end }} + +{{/* +Renders the global.topology.nodeSelectors.jobsNodePool value +*/}} +{{- define "airbyte.topology.nodeSelectors.jobsNodePool" }} + {{- .Values.global.topology.nodeSelectors.jobsNodePool | default "jobs" }} +{{- end }} + +{{/* +Renders the topology.nodeSelectors.jobsNodePool environment variable +*/}} +{{- define "airbyte.topology.nodeSelectors.jobsNodePool.env" }} +- name: NODE_SELECTOR_LABEL_JOBS_NODE_POOL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: NODE_SELECTOR_LABEL_JOBS_NODE_POOL +{{- end }} + +{{/* +Renders the global.topology.nodeSelectors.quickJobsNodePool value +*/}} +{{- define "airbyte.topology.nodeSelectors.quickJobsNodePool" }} + {{- .Values.global.topology.nodeSelectors.quickJobsNodePool | default "quick-jobs" }} +{{- end }} + +{{/* +Renders the topology.nodeSelectors.quickJobsNodePool environment variable +*/}} +{{- define "airbyte.topology.nodeSelectors.quickJobsNodePool.env" }} +- name: NODE_SELECTOR_LABEL_QUICK_JOBS_NODE_POOL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: NODE_SELECTOR_LABEL_QUICK_JOBS_NODE_POOL +{{- end }} + +{{/* +Renders the set of all topology environment variables +*/}} +{{- define "airbyte.topology.envs" }} +{{- include "airbyte.topology.nodeSelectorLabel.env" . }} +{{- include "airbyte.topology.nodeSelectors.mainNodePool.env" . }} +{{- include "airbyte.topology.nodeSelectors.jobsNodePool.env" . }} +{{- include "airbyte.topology.nodeSelectors.quickJobsNodePool.env" . }} +{{- end }} + +{{/* +Renders the set of all topology config map variables +*/}} +{{- define "airbyte.topology.configVars" }} +NODE_SELECTOR_LABEL: {{ include "airbyte.topology.nodeSelectorLabel" . | quote }} +NODE_SELECTOR_LABEL_MAIN_NODE_POOL: {{ include "airbyte.topology.nodeSelectors.mainNodePool" . | quote }} +NODE_SELECTOR_LABEL_JOBS_NODE_POOL: {{ include "airbyte.topology.nodeSelectors.jobsNodePool" . | quote }} +NODE_SELECTOR_LABEL_QUICK_JOBS_NODE_POOL: {{ include "airbyte.topology.nodeSelectors.quickJobsNodePool" . 
| quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_tracking.tpl b/charts/v2/airbyte/templates/config/_tracking.tpl new file mode 100644 index 00000000000..68a730e2a80 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_tracking.tpl @@ -0,0 +1,89 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Tracking Configuration +*/}} + +{{/* +Renders the tracking secret name +*/}} +{{- define "airbyte.tracking.secretName" }} +{{- if .Values.global.tracking.secretName }} + {{- .Values.global.tracking.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.tracking.enabled value +*/}} +{{- define "airbyte.tracking.enabled" }} + {{- .Values.global.tracking.enabled | default true }} +{{- end }} + +{{/* +Renders the tracking.enabled environment variable +*/}} +{{- define "airbyte.tracking.enabled.env" }} +- name: TRACKING_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TRACKING_ENABLED +{{- end }} + +{{/* +Renders the global.tracking.strategy value +*/}} +{{- define "airbyte.tracking.strategy" }} + {{- .Values.global.tracking.strategy | default "logging" }} +{{- end }} + +{{/* +Renders the tracking.strategy environment variable +*/}} +{{- define "airbyte.tracking.strategy.env" }} +- name: TRACKING_STRATEGY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: TRACKING_STRATEGY +{{- end }} + +{{/* +Renders the global.tracking.segment.writeKeySecretKey value +*/}} +{{- define "airbyte.tracking.segment.writeKeySecretKey" }} + {{- .Values.global.tracking.segment.writeKeySecretKey | default "7UDdp5K55CyiGgsauOr2pNNujGvmhaeu" }} +{{- end }} + +{{/* +Renders the tracking.segment.writeKeySecretKey environment variable +*/}} +{{- define "airbyte.tracking.segment.writeKeySecretKey.env" }} +- name: SEGMENT_WRITE_KEY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SEGMENT_WRITE_KEY +{{- end }} + +{{/* +Renders the set of all tracking environment variables +*/}} +{{- define "airbyte.tracking.envs" }} +{{- include "airbyte.tracking.enabled.env" . }} +{{- include "airbyte.tracking.strategy.env" . }} +{{- include "airbyte.tracking.segment.writeKeySecretKey.env" . }} +{{- end }} + +{{/* +Renders the set of all tracking config map variables +*/}} +{{- define "airbyte.tracking.configVars" }} +TRACKING_ENABLED: {{ include "airbyte.tracking.enabled" . | quote }} +TRACKING_STRATEGY: {{ include "airbyte.tracking.strategy" . | quote }} +SEGMENT_WRITE_KEY: {{ include "airbyte.tracking.segment.writeKeySecretKey" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_webapp.tpl b/charts/v2/airbyte/templates/config/_webapp.tpl new file mode 100644 index 00000000000..3e37a7fae5b --- /dev/null +++ b/charts/v2/airbyte/templates/config/_webapp.tpl @@ -0,0 +1,71 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
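The webapp helpers that follow fix API_URL to /api/v1 and build CONNECTOR_BUILDER_API_HOST from the connector-builder-server service port. A values.yaml sketch under those assumptions; the port 80 shown is a placeholder, not a value taken from these templates:

  webapp:
    connectorBuilderServer:
      url: /connector-builder-api   # default in airbyte.webapp.connectorBuilderServer.url
  connectorBuilderServer:
    service:
      port: 80                      # placeholder; feeds CONNECTOR_BUILDER_API_HOST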
*/}} + +{{/* + Webapp Configuration +*/}} + +{{/* +Renders the webapp.api.url environment variable +*/}} +{{- define "airbyte.webapp.api.url.env" }} +- name: API_URL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: API_URL +{{- end }} + +{{/* +Renders the webapp.connectorBuilderServer.host value +*/}} +{{- define "airbyte.webapp.connectorBuilderServer.host" }} + {{- (printf "%s-airbyte-connector-builder-server-svc:%d" .Release.Name (int .Values.connectorBuilderServer.service.port)) }} +{{- end }} + +{{/* +Renders the webapp.connectorBuilderServer.host environment variable +*/}} +{{- define "airbyte.webapp.connectorBuilderServer.host.env" }} +- name: CONNECTOR_BUILDER_API_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONNECTOR_BUILDER_API_HOST +{{- end }} + +{{/* +Renders the webapp.connectorBuilderServer.url value +*/}} +{{- define "airbyte.webapp.connectorBuilderServer.url" }} + {{- .Values.webapp.connectorBuilderServer.url | default "/connector-builder-api" }} +{{- end }} + +{{/* +Renders the webapp.connectorBuilderServer.url environment variable +*/}} +{{- define "airbyte.webapp.connectorBuilderServer.url.env" }} +- name: CONNECTOR_BUILDER_API_URL + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONNECTOR_BUILDER_API_URL +{{- end }} + +{{/* +Renders the set of all webapp environment variables +*/}} +{{- define "airbyte.webapp.envs" }} +{{- include "airbyte.webapp.api.url.env" . }} +{{- include "airbyte.webapp.connectorBuilderServer.host.env" . }} +{{- include "airbyte.webapp.connectorBuilderServer.url.env" . }} +{{- end }} + +{{/* +Renders the set of all webapp config map variables +*/}} +{{- define "airbyte.webapp.configVars" }} +API_URL: {{ "/api/v1" | quote }} +CONNECTOR_BUILDER_API_HOST: {{ (printf "%s-airbyte-connector-builder-server-svc:%d" .Release.Name (int .Values.connectorBuilderServer.service.port)) | quote }} +CONNECTOR_BUILDER_API_URL: {{ include "airbyte.webapp.connectorBuilderServer.url" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_worker.tpl b/charts/v2/airbyte/templates/config/_worker.tpl new file mode 100644 index 00000000000..46431e5bf1a --- /dev/null +++ b/charts/v2/airbyte/templates/config/_worker.tpl @@ -0,0 +1,304 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
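The worker helpers that follow expose retry, timeout, and workspace settings, while hardcoding CONFIG_ROOT to /configs and WORKER_ENVIRONMENT to kubernetes. A values.yaml sketch restating the defaults visible in these templates:

  worker:
    maxNotifyWorkers: 30                  # default in airbyte.worker.maxNotifyWorkers
    maxSyncWorkers: 5                     # default; rendered as MAX_SYNC_WORKER
    shouldRunNotifyWorkflows: true
    syncJobMaxAttempts: 3
    syncJobMaxTimeoutDays: 3
    syncJobInitRetryTimeoutMinutes: 5
    useStreamCapableState: true
    workspaceDockerMount: airbyte_workspace
    workspaceRoot: /workspace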
*/}} + +{{/* + Worker Configuration +*/}} + +{{/* +Renders the worker.activityMaxAttempt value +*/}} +{{- define "airbyte.worker.activityMaxAttempt" }} + {{- .Values.worker.activityMaxAttempt }} +{{- end }} + +{{/* +Renders the worker.activityMaxAttempt environment variable +*/}} +{{- define "airbyte.worker.activityMaxAttempt.env" }} +- name: ACTIVITY_MAX_ATTEMPT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: ACTIVITY_MAX_ATTEMPT +{{- end }} + +{{/* +Renders the worker.activityInitialDelayBetweenAttemptsSeconds value +*/}} +{{- define "airbyte.worker.activityInitialDelayBetweenAttemptsSeconds" }} + {{- .Values.worker.activityInitialDelayBetweenAttemptsSeconds }} +{{- end }} + +{{/* +Renders the worker.activityInitialDelayBetweenAttemptsSeconds environment variable +*/}} +{{- define "airbyte.worker.activityInitialDelayBetweenAttemptsSeconds.env" }} +- name: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS +{{- end }} + +{{/* +Renders the worker.activityMaxDelayBetweenAttemptsSeconds value +*/}} +{{- define "airbyte.worker.activityMaxDelayBetweenAttemptsSeconds" }} + {{- .Values.worker.activityMaxDelayBetweenAttemptsSeconds }} +{{- end }} + +{{/* +Renders the worker.activityMaxDelayBetweenAttemptsSeconds environment variable +*/}} +{{- define "airbyte.worker.activityMaxDelayBetweenAttemptsSeconds.env" }} +- name: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS +{{- end }} + +{{/* +Renders the worker.configRoot environment variable +*/}} +{{- define "airbyte.worker.configRoot.env" }} +- name: CONFIG_ROOT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONFIG_ROOT +{{- end }} + +{{/* +Renders the worker.maxNotifyWorkers value +*/}} +{{- define "airbyte.worker.maxNotifyWorkers" }} + {{- .Values.worker.maxNotifyWorkers | default 30 }} +{{- end }} + +{{/* +Renders the worker.maxNotifyWorkers environment variable +*/}} +{{- define "airbyte.worker.maxNotifyWorkers.env" }} +- name: MAX_NOTIFY_WORKERS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: MAX_NOTIFY_WORKERS +{{- end }} + +{{/* +Renders the worker.maxSyncWorkers value +*/}} +{{- define "airbyte.worker.maxSyncWorkers" }} + {{- .Values.worker.maxSyncWorkers | default 5 }} +{{- end }} + +{{/* +Renders the worker.maxSyncWorkers environment variable +*/}} +{{- define "airbyte.worker.maxSyncWorkers.env" }} +- name: MAX_SYNC_WORKER + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: MAX_SYNC_WORKER +{{- end }} + +{{/* +Renders the worker.shouldRunNotifyWorkflows value +*/}} +{{- define "airbyte.worker.shouldRunNotifyWorkflows" }} + {{- .Values.worker.shouldRunNotifyWorkflows | default true }} +{{- end }} + +{{/* +Renders the worker.shouldRunNotifyWorkflows environment variable +*/}} +{{- define "airbyte.worker.shouldRunNotifyWorkflows.env" }} +- name: SHOULD_RUN_NOTIFY_WORKFLOWS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SHOULD_RUN_NOTIFY_WORKFLOWS +{{- end }} + +{{/* +Renders the worker.syncJobMaxAttempts value +*/}} +{{- define "airbyte.worker.syncJobMaxAttempts" }} + {{- .Values.worker.syncJobMaxAttempts | default 3 }} +{{- end }} + +{{/* +Renders the worker.syncJobMaxAttempts environment variable +*/}} +{{- define 
"airbyte.worker.syncJobMaxAttempts.env" }} +- name: SYNC_JOB_MAX_ATTEMPTS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SYNC_JOB_MAX_ATTEMPTS +{{- end }} + +{{/* +Renders the worker.syncJobMaxTimeoutDays value +*/}} +{{- define "airbyte.worker.syncJobMaxTimeoutDays" }} + {{- .Values.worker.syncJobMaxTimeoutDays | default 3 }} +{{- end }} + +{{/* +Renders the worker.syncJobMaxTimeoutDays environment variable +*/}} +{{- define "airbyte.worker.syncJobMaxTimeoutDays.env" }} +- name: SYNC_JOB_MAX_TIMEOUT_DAYS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SYNC_JOB_MAX_TIMEOUT_DAYS +{{- end }} + +{{/* +Renders the worker.syncJobInitRetryTimeoutMinutes value +*/}} +{{- define "airbyte.worker.syncJobInitRetryTimeoutMinutes" }} + {{- .Values.worker.syncJobInitRetryTimeoutMinutes | default 5 }} +{{- end }} + +{{/* +Renders the worker.syncJobInitRetryTimeoutMinutes environment variable +*/}} +{{- define "airbyte.worker.syncJobInitRetryTimeoutMinutes.env" }} +- name: SYNC_JOB_INIT_RETRY_TIMEOUT_MINUTES + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SYNC_JOB_INIT_RETRY_TIMEOUT_MINUTES +{{- end }} + +{{/* +Renders the worker.useStreamCapableState value +*/}} +{{- define "airbyte.worker.useStreamCapableState" }} + {{- .Values.worker.useStreamCapableState | default true }} +{{- end }} + +{{/* +Renders the worker.useStreamCapableState environment variable +*/}} +{{- define "airbyte.worker.useStreamCapableState.env" }} +- name: USE_STREAM_CAPABLE_STATE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: USE_STREAM_CAPABLE_STATE +{{- end }} + +{{/* +Renders the worker.workflowFailureRestartDelaySeconds value +*/}} +{{- define "airbyte.worker.workflowFailureRestartDelaySeconds" }} + {{- .Values.worker.workflowFailureRestartDelaySeconds }} +{{- end }} + +{{/* +Renders the worker.workflowFailureRestartDelaySeconds environment variable +*/}} +{{- define "airbyte.worker.workflowFailureRestartDelaySeconds.env" }} +- name: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKFLOW_FAILURE_RESTART_DELAY_SECONDS +{{- end }} + +{{/* +Renders the worker.workspaceDockerMount value +*/}} +{{- define "airbyte.worker.workspaceDockerMount" }} + {{- .Values.worker.workspaceDockerMount | default "airbyte_workspace" }} +{{- end }} + +{{/* +Renders the worker.workspaceDockerMount environment variable +*/}} +{{- define "airbyte.worker.workspaceDockerMount.env" }} +- name: WORKSPACE_DOCKER_MOUNT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKSPACE_DOCKER_MOUNT +{{- end }} + +{{/* +Renders the worker.workspaceRoot value +*/}} +{{- define "airbyte.worker.workspaceRoot" }} + {{- .Values.worker.workspaceRoot | default "/workspace" }} +{{- end }} + +{{/* +Renders the worker.workspaceRoot environment variable +*/}} +{{- define "airbyte.worker.workspaceRoot.env" }} +- name: WORKSPACE_ROOT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKSPACE_ROOT +{{- end }} + +{{/* +Renders the worker.environment environment variable +*/}} +{{- define "airbyte.worker.environment.env" }} +- name: WORKER_ENVIRONMENT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKER_ENVIRONMENT +{{- end }} + +{{/* +Renders the set of all worker environment variables +*/}} +{{- define "airbyte.worker.envs" }} +{{- include "airbyte.worker.activityMaxAttempt.env" . 
}} +{{- include "airbyte.worker.activityInitialDelayBetweenAttemptsSeconds.env" . }} +{{- include "airbyte.worker.activityMaxDelayBetweenAttemptsSeconds.env" . }} +{{- include "airbyte.worker.configRoot.env" . }} +{{- include "airbyte.worker.maxNotifyWorkers.env" . }} +{{- include "airbyte.worker.maxSyncWorkers.env" . }} +{{- include "airbyte.worker.shouldRunNotifyWorkflows.env" . }} +{{- include "airbyte.worker.syncJobMaxAttempts.env" . }} +{{- include "airbyte.worker.syncJobMaxTimeoutDays.env" . }} +{{- include "airbyte.worker.syncJobInitRetryTimeoutMinutes.env" . }} +{{- include "airbyte.worker.useStreamCapableState.env" . }} +{{- include "airbyte.worker.workflowFailureRestartDelaySeconds.env" . }} +{{- include "airbyte.worker.workspaceDockerMount.env" . }} +{{- include "airbyte.worker.workspaceRoot.env" . }} +{{- include "airbyte.worker.environment.env" . }} +{{- end }} + +{{/* +Renders the set of all worker config map variables +*/}} +{{- define "airbyte.worker.configVars" }} +ACTIVITY_MAX_ATTEMPT: {{ include "airbyte.worker.activityMaxAttempt" . | quote }} +ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS: {{ include "airbyte.worker.activityInitialDelayBetweenAttemptsSeconds" . | quote }} +ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS: {{ include "airbyte.worker.activityMaxDelayBetweenAttemptsSeconds" . | quote }} +CONFIG_ROOT: {{ "/configs" | quote }} +MAX_NOTIFY_WORKERS: {{ include "airbyte.worker.maxNotifyWorkers" . | quote }} +MAX_SYNC_WORKER: {{ include "airbyte.worker.maxSyncWorkers" . | quote }} +SHOULD_RUN_NOTIFY_WORKFLOWS: {{ include "airbyte.worker.shouldRunNotifyWorkflows" . | quote }} +SYNC_JOB_MAX_ATTEMPTS: {{ include "airbyte.worker.syncJobMaxAttempts" . | quote }} +SYNC_JOB_MAX_TIMEOUT_DAYS: {{ include "airbyte.worker.syncJobMaxTimeoutDays" . | quote }} +SYNC_JOB_INIT_RETRY_TIMEOUT_MINUTES: {{ include "airbyte.worker.syncJobInitRetryTimeoutMinutes" . | quote }} +USE_STREAM_CAPABLE_STATE: {{ include "airbyte.worker.useStreamCapableState" . | quote }} +WORKFLOW_FAILURE_RESTART_DELAY_SECONDS: {{ include "airbyte.worker.workflowFailureRestartDelaySeconds" . | quote }} +WORKSPACE_DOCKER_MOUNT: {{ include "airbyte.worker.workspaceDockerMount" . | quote }} +WORKSPACE_ROOT: {{ include "airbyte.worker.workspaceRoot" . | quote }} +WORKER_ENVIRONMENT: {{ "kubernetes" | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_workloadApiServer.tpl b/charts/v2/airbyte/templates/config/_workloadApiServer.tpl new file mode 100644 index 00000000000..5204648a8e9 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_workloadApiServer.tpl @@ -0,0 +1,142 @@ + +{{/* DO NOT EDIT: This file was autogenerated. 
*/}} + +{{/* + Workloadapiserver Configuration +*/}} + +{{/* +Renders the workloadApiServer secret name +*/}} +{{- define "airbyte.workloadApiServer.secretName" }} +{{- if .Values.workloadApiServer.secretName }} + {{- .Values.workloadApiServer.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the workloadApiServer.enabled value +*/}} +{{- define "airbyte.workloadApiServer.enabled" }} + {{- .Values.workloadApiServer.enabled | default true }} +{{- end }} + +{{/* +Renders the workloadApiServer.enabled environment variable +*/}} +{{- define "airbyte.workloadApiServer.enabled.env" }} +- name: WORKLOAD_API_SERVER_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_API_SERVER_ENABLED +{{- end }} + +{{/* +Renders the workloadApiServer.host value +*/}} +{{- define "airbyte.workloadApiServer.host" }} + {{- (printf "http://%s-workload-api-server-svc:%d" .Release.Name (int .Values.workloadApiServer.service.port)) }} +{{- end }} + +{{/* +Renders the workloadApiServer.host environment variable +*/}} +{{- define "airbyte.workloadApiServer.host.env" }} +- name: WORKLOAD_API_HOST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_API_HOST +{{- end }} + +{{/* +Renders the workloadApiServer.bearerToken value +*/}} +{{- define "airbyte.workloadApiServer.bearerToken" }} + {{- .Values.workloadApiServer.bearerToken | default "token" }} +{{- end }} + +{{/* +Renders the workloadApiServer.bearerToken secret key +*/}} +{{- define "airbyte.workloadApiServer.bearerToken.secretKey" }} + {{- .Values.workloadApiServer.bearerTokenSecretKey | default "WORKLOAD_API_BEARER_TOKEN" }} +{{- end }} + +{{/* +Renders the workloadApiServer.bearerToken environment variable +*/}} +{{- define "airbyte.workloadApiServer.bearerToken.env" }} +- name: WORKLOAD_API_BEARER_TOKEN + valueFrom: + secretKeyRef: + name: {{ include "airbyte.workloadApiServer.secretName" . }} + key: {{ include "airbyte.workloadApiServer.bearerToken.secretKey" . }} +{{- end }} + +{{/* +Renders the workloadApiServer.bearerTokenSecretName value +*/}} +{{- define "airbyte.workloadApiServer.bearerTokenSecretName" }} + {{- .Values.workloadApiServer.bearerTokenSecretName | default (include "airbyte.workloadApiServer.secretName" .) }} +{{- end }} + +{{/* +Renders the workloadApiServer.bearerTokenSecretName environment variable +*/}} +{{- define "airbyte.workloadApiServer.bearerTokenSecretName.env" }} +- name: WORKLOAD_API_BEARER_TOKEN_SECRET_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_API_BEARER_TOKEN_SECRET_NAME +{{- end }} + +{{/* +Renders the workloadApiServer.bearerTokenSecretKey value +*/}} +{{- define "airbyte.workloadApiServer.bearerTokenSecretKey" }} + {{- .Values.workloadApiServer.bearerTokenSecretKey | default "WORKLOAD_API_BEARER_TOKEN" }} +{{- end }} + +{{/* +Renders the workloadApiServer.bearerTokenSecretKey environment variable +*/}} +{{- define "airbyte.workloadApiServer.bearerTokenSecretKey.env" }} +- name: WORKLOAD_API_BEARER_TOKEN_SECRET_KEY + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_API_BEARER_TOKEN_SECRET_KEY +{{- end }} + +{{/* +Renders the set of all workloadApiServer environment variables +*/}} +{{- define "airbyte.workloadApiServer.envs" }} +{{- include "airbyte.workloadApiServer.enabled.env" . }} +{{- include "airbyte.workloadApiServer.host.env" . }} +{{- include "airbyte.workloadApiServer.bearerToken.env" . 
}} +{{- include "airbyte.workloadApiServer.bearerTokenSecretName.env" . }} +{{- include "airbyte.workloadApiServer.bearerTokenSecretKey.env" . }} +{{- end }} + +{{/* +Renders the set of all workloadApiServer config map variables +*/}} +{{- define "airbyte.workloadApiServer.configVars" }} +WORKLOAD_API_SERVER_ENABLED: {{ include "airbyte.workloadApiServer.enabled" . | quote }} +WORKLOAD_API_HOST: {{ (printf "http://%s-workload-api-server-svc:%d" .Release.Name (int .Values.workloadApiServer.service.port)) | quote }} +WORKLOAD_API_BEARER_TOKEN_SECRET_NAME: {{ include "airbyte.workloadApiServer.bearerTokenSecretName" . | quote }} +WORKLOAD_API_BEARER_TOKEN_SECRET_KEY: {{ include "airbyte.workloadApiServer.bearerTokenSecretKey" . | quote }} +{{- end }} + +{{/* +Renders the set of all workloadApiServer secret variables +*/}} +{{- define "airbyte.workloadApiServer.secrets" }} +WORKLOAD_API_BEARER_TOKEN: {{ include "airbyte.workloadApiServer.bearerToken" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_workloadLauncher.tpl b/charts/v2/airbyte/templates/config/_workloadLauncher.tpl new file mode 100644 index 00000000000..70222b44969 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_workloadLauncher.tpl @@ -0,0 +1,161 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Workloadlauncher Configuration +*/}} + +{{/* +Renders the workloadLauncher.enabled value +*/}} +{{- define "airbyte.workloadLauncher.enabled" }} + {{- .Values.workloadLauncher.enabled | default true }} +{{- end }} + +{{/* +Renders the workloadLauncher.enabled environment variable +*/}} +{{- define "airbyte.workloadLauncher.enabled.env" }} +- name: WORKLOAD_LAUNCHER_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_LAUNCHER_ENABLED +{{- end }} + +{{/* +Renders the workloadLauncher.parallelism value +*/}} +{{- define "airbyte.workloadLauncher.parallelism" }} + {{- .Values.workloadLauncher.parallelism | default 10 }} +{{- end }} + +{{/* +Renders the workloadLauncher.parallelism environment variable +*/}} +{{- define "airbyte.workloadLauncher.parallelism.env" }} +- name: WORKLOAD_LAUNCHER_PARALLELISM + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_LAUNCHER_PARALLELISM +{{- end }} + +{{/* +Renders the set of all workloadLauncher environment variables +*/}} +{{- define "airbyte.workloadLauncher.envs" }} +{{- include "airbyte.workloadLauncher.enabled.env" . }} +{{- include "airbyte.workloadLauncher.parallelism.env" . }} +{{- end }} + +{{/* +Renders the set of all workloadLauncher config map variables +*/}} +{{- define "airbyte.workloadLauncher.configVars" }} +WORKLOAD_LAUNCHER_ENABLED: {{ include "airbyte.workloadLauncher.enabled" . | quote }} +WORKLOAD_LAUNCHER_PARALLELISM: {{ include "airbyte.workloadLauncher.parallelism" . 
| quote }} +{{- end }} + +{{/* +Renders the workloadLauncher.images secret name +*/}} +{{- define "airbyte.workloadLauncher.images.secretName" }} +{{- if .Values.workloadLauncher.secretName }} + {{- .Values.workloadLauncher.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the workloadLauncher.connectorSidecar.image value +*/}} +{{- define "airbyte.workloadLauncher.images.connectorSidecar.image" }} + {{- include "imageUrl" (list .Values.workloadLauncher.connectorSidecar.image $) }} +{{- end }} + +{{/* +Renders the workloadLauncher.images.connectorSidecar.image environment variable +*/}} +{{- define "airbyte.workloadLauncher.images.connectorSidecar.image.env" }} +- name: CONNECTOR_SIDECAR_IMAGE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONNECTOR_SIDECAR_IMAGE +{{- end }} + +{{/* +Renders the workloadLauncher.containerOrchestrator.enabled value +*/}} +{{- define "airbyte.workloadLauncher.images.containerOrchestrator.enabled" }} + {{- .Values.workloadLauncher.containerOrchestrator.enabled }} +{{- end }} + +{{/* +Renders the workloadLauncher.images.containerOrchestrator.enabled environment variable +*/}} +{{- define "airbyte.workloadLauncher.images.containerOrchestrator.enabled.env" }} +- name: CONTAINER_ORCHESTRATOR_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONTAINER_ORCHESTRATOR_ENABLED +{{- end }} + +{{/* +Renders the workloadLauncher.containerOrchestrator.image value +*/}} +{{- define "airbyte.workloadLauncher.images.containerOrchestrator.image" }} + {{- include "imageUrl" (list .Values.workloadLauncher.containerOrchestrator.image $) }} +{{- end }} + +{{/* +Renders the workloadLauncher.images.containerOrchestrator.image environment variable +*/}} +{{- define "airbyte.workloadLauncher.images.containerOrchestrator.image.env" }} +- name: CONTAINER_ORCHESTRATOR_IMAGE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONTAINER_ORCHESTRATOR_IMAGE +{{- end }} + +{{/* +Renders the workloadLauncher.workloadInit.image value +*/}} +{{- define "airbyte.workloadLauncher.images.workloadInit.image" }} + {{- include "imageUrl" (list .Values.workloadLauncher.workloadInit.image $) }} +{{- end }} + +{{/* +Renders the workloadLauncher.images.workloadInit.image environment variable +*/}} +{{- define "airbyte.workloadLauncher.images.workloadInit.image.env" }} +- name: WORKLOAD_INIT_IMAGE + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: WORKLOAD_INIT_IMAGE +{{- end }} + +{{/* +Renders the set of all workloadLauncher.images environment variables +*/}} +{{- define "airbyte.workloadLauncher.images.envs" }} +{{- include "airbyte.workloadLauncher.images.connectorSidecar.image.env" . }} +{{- include "airbyte.workloadLauncher.images.containerOrchestrator.enabled.env" . }} +{{- include "airbyte.workloadLauncher.images.containerOrchestrator.image.env" . }} +{{- include "airbyte.workloadLauncher.images.workloadInit.image.env" . }} +{{- end }} + +{{/* +Renders the set of all workloadLauncher.images config map variables +*/}} +{{- define "airbyte.workloadLauncher.images.configVars" }} +CONNECTOR_SIDECAR_IMAGE: {{ include "imageUrl" (list .Values.workloadLauncher.connectorSidecar.image $) | quote }} +CONTAINER_ORCHESTRATOR_ENABLED: {{ include "airbyte.workloadLauncher.images.containerOrchestrator.enabled" . 
| quote }} +CONTAINER_ORCHESTRATOR_IMAGE: {{ include "imageUrl" (list .Values.workloadLauncher.containerOrchestrator.image $) | quote }} +WORKLOAD_INIT_IMAGE: {{ include "imageUrl" (list .Values.workloadLauncher.workloadInit.image $) | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/config/_workloads.tpl b/charts/v2/airbyte/templates/config/_workloads.tpl new file mode 100644 index 00000000000..0e5fbbf4234 --- /dev/null +++ b/charts/v2/airbyte/templates/config/_workloads.tpl @@ -0,0 +1,754 @@ + +{{/* DO NOT EDIT: This file was autogenerated. */}} + +{{/* + Workloads Configuration +*/}} + +{{/* +Renders the workloads secret name +*/}} +{{- define "airbyte.workloads.secretName" }} +{{- if .Values.global.workloads.secretName }} + {{- .Values.global.workloads.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the workloadLauncher.containerOrchestrator.secretName value +*/}} +{{- define "airbyte.workloads.containerOrchestrator.secretName" }} + {{- include "airbyte.workloads.secretName" . }} +{{- end }} + +{{/* +Renders the workloads.containerOrchestrator.secretName environment variable +*/}} +{{- define "airbyte.workloads.containerOrchestrator.secretName.env" }} +- name: CONTAINER_ORCHESTRATOR_SECRET_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONTAINER_ORCHESTRATOR_SECRET_NAME +{{- end }} + +{{/* +Renders the workloads.containerOrchestrator.dataplane.secretMountPath environment variable +*/}} +{{- define "airbyte.workloads.containerOrchestrator.dataplane.secretMountPath.env" }} +- name: CONTAINER_ORCHESTRATOR_DATA_PLANE_CREDS_SECRET_MOUNT_PATH + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONTAINER_ORCHESTRATOR_DATA_PLANE_CREDS_SECRET_MOUNT_PATH +{{- end }} + +{{/* +Renders the workloadLauncher.containerOrchestrator.dataplane.secretName value +*/}} +{{- define "airbyte.workloads.containerOrchestrator.dataplane.secretName" }} + {{- include "airbyte.workloads.secretName" . 
}} +{{- end }} + +{{/* +Renders the workloads.containerOrchestrator.dataplane.secretName environment variable +*/}} +{{- define "airbyte.workloads.containerOrchestrator.dataplane.secretName.env" }} +- name: CONTAINER_ORCHESTRATOR_DATA_PLANE_CREDS_SECRET_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONTAINER_ORCHESTRATOR_DATA_PLANE_CREDS_SECRET_NAME +{{- end }} + +{{/* +Renders the workloads.containerOrchestrator.javaOpts environment variable +*/}} +{{- define "airbyte.workloads.containerOrchestrator.javaOpts.env" }} +- name: CONTAINER_ORCHESTRATOR_JAVA_OPTS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONTAINER_ORCHESTRATOR_JAVA_OPTS +{{- end }} + +{{/* +Renders the workloads.containerOrchestrator.secretMountPath environment variable +*/}} +{{- define "airbyte.workloads.containerOrchestrator.secretMountPath.env" }} +- name: CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH +{{- end }} + +{{/* +Renders the workloadLauncher.kubernetesClientMaxIdleConnections value +*/}} +{{- define "airbyte.workloads.kubernetesClientMaxIdleConnections" }} + {{- .Values.workloadLauncher.kubernetesClientMaxIdleConnections | default 100 }} +{{- end }} + +{{/* +Renders the workloads.kubernetesClientMaxIdleConnections environment variable +*/}} +{{- define "airbyte.workloads.kubernetesClientMaxIdleConnections.env" }} +- name: KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS +{{- end }} + +{{/* +Renders the workloadLauncher.kuberentesClientMaxRetries value +*/}} +{{- define "airbyte.workloads.kuberentesClientMaxRetries" }} + {{- .Values.workloadLauncher.kuberentesClientMaxRetries | default "" }} +{{- end }} + +{{/* +Renders the workloads.kuberentesClientMaxRetries environment variable +*/}} +{{- define "airbyte.workloads.kuberentesClientMaxRetries.env" }} +- name: KUBERNETES_CLIENT_MAX_RETRIES + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: KUBERNETES_CLIENT_MAX_RETRIES +{{- end }} + +{{/* +Renders the global.workloads.pubSub.enabled value +*/}} +{{- define "airbyte.workloads.pubSub.enabled" }} + {{- .Values.global.workloads.pubSub.enabled | default false }} +{{- end }} + +{{/* +Renders the workloads.pubSub.enabled environment variable +*/}} +{{- define "airbyte.workloads.pubSub.enabled.env" }} +- name: PUB_SUB_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: PUB_SUB_ENABLED +{{- end }} + +{{/* +Renders the global.workloads.pubSub.topicName value +*/}} +{{- define "airbyte.workloads.pubSub.topicName" }} + {{- .Values.global.workloads.pubSub.topicName | default "" }} +{{- end }} + +{{/* +Renders the workloads.pubSub.topicName environment variable +*/}} +{{- define "airbyte.workloads.pubSub.topicName.env" }} +- name: PUB_SUB_TOPIC_NAME + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: PUB_SUB_TOPIC_NAME +{{- end }} + +{{/* +Renders the set of all workloads environment variables +*/}} +{{- define "airbyte.workloads.envs" }} +{{- include "airbyte.workloads.containerOrchestrator.secretName.env" . }} +{{- include "airbyte.workloads.containerOrchestrator.dataplane.secretMountPath.env" . }} +{{- include "airbyte.workloads.containerOrchestrator.dataplane.secretName.env" . 
}} +{{- include "airbyte.workloads.containerOrchestrator.javaOpts.env" . }} +{{- include "airbyte.workloads.containerOrchestrator.secretMountPath.env" . }} +{{- include "airbyte.workloads.kubernetesClientMaxIdleConnections.env" . }} +{{- include "airbyte.workloads.kuberentesClientMaxRetries.env" . }} +{{- include "airbyte.workloads.pubSub.enabled.env" . }} +{{- include "airbyte.workloads.pubSub.topicName.env" . }} +{{- end }} + +{{/* +Renders the set of all workloads config map variables +*/}} +{{- define "airbyte.workloads.configVars" }} +CONTAINER_ORCHESTRATOR_SECRET_NAME: {{ include "airbyte.workloads.secretName" . | quote }} +CONTAINER_ORCHESTRATOR_DATA_PLANE_CREDS_SECRET_MOUNT_PATH: {{ "/secrets/dataplane-creds" | quote }} +CONTAINER_ORCHESTRATOR_DATA_PLANE_CREDS_SECRET_NAME: {{ include "airbyte.workloads.secretName" . | quote }} +CONTAINER_ORCHESTRATOR_JAVA_OPTS: {{ "-XX:+ExitOnOutOfMemoryError -XX:MaxRAMPercentage=75.0 -javaagent:/app/dd-java-agent.jar -Ddd.profiling.enabled=true -XX:FlightRecorderOptions=stackdepth=256 -Ddd.trace.sample.rate=0.5 -Ddd.trace.request_header.tags=User-Agent:http.useragent" | quote }} +CONTAINER_ORCHESTRATOR_SECRET_MOUNT_PATH: {{ "/secrets/gcp-creds" | quote }} +KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS: {{ include "airbyte.workloads.kubernetesClientMaxIdleConnections" . | quote }} +KUBERNETES_CLIENT_MAX_RETRIES: {{ include "airbyte.workloads.kuberentesClientMaxRetries" . | quote }} +PUB_SUB_ENABLED: {{ include "airbyte.workloads.pubSub.enabled" . | quote }} +PUB_SUB_TOPIC_NAME: {{ include "airbyte.workloads.pubSub.topicName" . | quote }} +{{- end }} + +{{/* +Renders the workloads.queues secret name +*/}} +{{- define "airbyte.workloads.queues.secretName" }} +{{- if .Values.global.workloads.queues.secretName }} + {{- .Values.global.workloads.queues.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.workloads.queues.check value +*/}} +{{- define "airbyte.workloads.queues.check" }} + {{- join " " .Values.global.workloads.queues.check }} +{{- end }} + +{{/* +Renders the workloads.queues.check environment variable +*/}} +{{- define "airbyte.workloads.queues.check.env" }} +- name: DATA_CHECK_TASK_QUEUES + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATA_CHECK_TASK_QUEUES +{{- end }} + +{{/* +Renders the global.workloads.queues.discover value +*/}} +{{- define "airbyte.workloads.queues.discover" }} + {{- join " " .Values.global.workloads.queues.discover }} +{{- end }} + +{{/* +Renders the workloads.queues.discover environment variable +*/}} +{{- define "airbyte.workloads.queues.discover.env" }} +- name: DATA_DISCOVER_TASK_QUEUES + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATA_DISCOVER_TASK_QUEUES +{{- end }} + +{{/* +Renders the global.workloads.queues.sync value +*/}} +{{- define "airbyte.workloads.queues.sync" }} + {{- join " " .Values.global.workloads.queues.sync }} +{{- end }} + +{{/* +Renders the workloads.queues.sync environment variable +*/}} +{{- define "airbyte.workloads.queues.sync.env" }} +- name: DATA_SYNC_TASK_QUEUES + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DATA_SYNC_TASK_QUEUES +{{- end }} + +{{/* +Renders the set of all workloads.queues environment variables +*/}} +{{- define "airbyte.workloads.queues.envs" }} +{{- include "airbyte.workloads.queues.check.env" . }} +{{- include "airbyte.workloads.queues.discover.env" . 
}} +{{- include "airbyte.workloads.queues.sync.env" . }} +{{- end }} + +{{/* +Renders the set of all workloads.queues config map variables +*/}} +{{- define "airbyte.workloads.queues.configVars" }} +DATA_CHECK_TASK_QUEUES: {{ join " " .Values.global.workloads.queues.check | quote }} +DATA_DISCOVER_TASK_QUEUES: {{ join " " .Values.global.workloads.queues.discover | quote }} +DATA_SYNC_TASK_QUEUES: {{ join " " .Values.global.workloads.queues.sync | quote }} +{{- end }} + +{{/* +Renders the workloads.resources secret name +*/}} +{{- define "airbyte.workloads.resources.secretName" }} +{{- if .Values.global.workloads.resources.secretName }} + {{- .Values.global.workloads.resources.secretName | quote }} +{{- else }} + {{- .Release.Name }}-airbyte-secrets +{{- end }} +{{- end }} + +{{/* +Renders the global.workloads.resources.useConnectorResourceDefaults value +*/}} +{{- define "airbyte.workloads.resources.useConnectorResourceDefaults" }} + {{- .Values.global.workloads.resources.useConnectorResourceDefaults | default true }} +{{- end }} + +{{/* +Renders the workloads.resources.useConnectorResourceDefaults environment variable +*/}} +{{- define "airbyte.workloads.resources.useConnectorResourceDefaults.env" }} +- name: CONNECTOR_SPECIFIC_RESOURCE_DEFAULTS_ENABLED + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CONNECTOR_SPECIFIC_RESOURCE_DEFAULTS_ENABLED +{{- end }} + +{{/* +Renders the global.workloads.resources.mainContainer.cpu.limit value +*/}} +{{- define "airbyte.workloads.resources.mainContainer.cpu.limit" }} + {{- .Values.global.workloads.resources.mainContainer.cpu.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.mainContainer.cpu.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.mainContainer.cpu.limit.env" }} +- name: JOB_MAIN_CONTAINER_CPU_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_MAIN_CONTAINER_CPU_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.mainContainer.cpu.request value +*/}} +{{- define "airbyte.workloads.resources.mainContainer.cpu.request" }} + {{- .Values.global.workloads.resources.mainContainer.cpu.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.mainContainer.cpu.request environment variable +*/}} +{{- define "airbyte.workloads.resources.mainContainer.cpu.request.env" }} +- name: JOB_MAIN_CONTAINER_CPU_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_MAIN_CONTAINER_CPU_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.mainContainer.memory.limit value +*/}} +{{- define "airbyte.workloads.resources.mainContainer.memory.limit" }} + {{- .Values.global.workloads.resources.mainContainer.memory.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.mainContainer.memory.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.mainContainer.memory.limit.env" }} +- name: JOB_MAIN_CONTAINER_MEMORY_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_MAIN_CONTAINER_MEMORY_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.mainContainer.memory.request value +*/}} +{{- define "airbyte.workloads.resources.mainContainer.memory.request" }} + {{- .Values.global.workloads.resources.mainContainer.memory.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.mainContainer.memory.request environment variable +*/}} +{{- define 
"airbyte.workloads.resources.mainContainer.memory.request.env" }} +- name: JOB_MAIN_CONTAINER_MEMORY_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: JOB_MAIN_CONTAINER_MEMORY_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.check.cpu.limit value +*/}} +{{- define "airbyte.workloads.resources.check.cpu.limit" }} + {{- .Values.global.workloads.resources.check.cpu.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.check.cpu.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.check.cpu.limit.env" }} +- name: CHECK_JOB_MAIN_CONTAINER_CPU_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CHECK_JOB_MAIN_CONTAINER_CPU_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.check.cpu.request value +*/}} +{{- define "airbyte.workloads.resources.check.cpu.request" }} + {{- .Values.global.workloads.resources.check.cpu.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.check.cpu.request environment variable +*/}} +{{- define "airbyte.workloads.resources.check.cpu.request.env" }} +- name: CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.check.memory.limit value +*/}} +{{- define "airbyte.workloads.resources.check.memory.limit" }} + {{- .Values.global.workloads.resources.check.memory.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.check.memory.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.check.memory.limit.env" }} +- name: CHECK_JOB_MAIN_CONTAINER_MEMORY_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CHECK_JOB_MAIN_CONTAINER_MEMORY_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.check.memory.request value +*/}} +{{- define "airbyte.workloads.resources.check.memory.request" }} + {{- .Values.global.workloads.resources.check.memory.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.check.memory.request environment variable +*/}} +{{- define "airbyte.workloads.resources.check.memory.request.env" }} +- name: CHECK_JOB_MAIN_CONTAINER_MEMORY_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: CHECK_JOB_MAIN_CONTAINER_MEMORY_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.discover.cpu.limit value +*/}} +{{- define "airbyte.workloads.resources.discover.cpu.limit" }} + {{- .Values.global.workloads.resources.discover.cpu.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.discover.cpu.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.discover.cpu.limit.env" }} +- name: DISCOVER_JOB_MAIN_CONTAINER_CPU_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DISCOVER_JOB_MAIN_CONTAINER_CPU_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.discover.cpu.request value +*/}} +{{- define "airbyte.workloads.resources.discover.cpu.request" }} + {{- .Values.global.workloads.resources.discover.cpu.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.discover.cpu.request environment variable +*/}} +{{- define "airbyte.workloads.resources.discover.cpu.request.env" }} +- name: DISCOVER_JOB_MAIN_CONTAINER_CPU_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: 
DISCOVER_JOB_MAIN_CONTAINER_CPU_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.discover.memory.limit value +*/}} +{{- define "airbyte.workloads.resources.discover.memory.limit" }} + {{- .Values.global.workloads.resources.discover.memory.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.discover.memory.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.discover.memory.limit.env" }} +- name: DISCOVER_JOB_MAIN_CONTAINER_MEMORY_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DISCOVER_JOB_MAIN_CONTAINER_MEMORY_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.discover.memory.request value +*/}} +{{- define "airbyte.workloads.resources.discover.memory.request" }} + {{- .Values.global.workloads.resources.discover.memory.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.discover.memory.request environment variable +*/}} +{{- define "airbyte.workloads.resources.discover.memory.request.env" }} +- name: DISCOVER_JOB_MAIN_CONTAINER_MEMORY_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: DISCOVER_JOB_MAIN_CONTAINER_MEMORY_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.fileTransfer.storage.limit value +*/}} +{{- define "airbyte.workloads.resources.fileTransfer.storage.limit" }} + {{- .Values.global.workloads.resources.fileTransfer.storage.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.fileTransfer.storage.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.fileTransfer.storage.limit.env" }} +- name: FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.fileTransfer.storage.request value +*/}} +{{- define "airbyte.workloads.resources.fileTransfer.storage.request" }} + {{- .Values.global.workloads.resources.fileTransfer.storage.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.fileTransfer.storage.request environment variable +*/}} +{{- define "airbyte.workloads.resources.fileTransfer.storage.request.env" }} +- name: FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.replication.cpu.limit value +*/}} +{{- define "airbyte.workloads.resources.replication.cpu.limit" }} + {{- .Values.global.workloads.resources.replication.cpu.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.replication.cpu.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.replication.cpu.limit.env" }} +- name: REPLICATION_ORCHESTRATOR_CPU_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: REPLICATION_ORCHESTRATOR_CPU_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.replication.cpu.request value +*/}} +{{- define "airbyte.workloads.resources.replication.cpu.request" }} + {{- .Values.global.workloads.resources.replication.cpu.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.replication.cpu.request environment variable +*/}} +{{- define "airbyte.workloads.resources.replication.cpu.request.env" }} +- name: REPLICATION_ORCHESTRATOR_CPU_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + 
key: REPLICATION_ORCHESTRATOR_CPU_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.replication.memory.limit value +*/}} +{{- define "airbyte.workloads.resources.replication.memory.limit" }} + {{- .Values.global.workloads.resources.replication.memory.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.replication.memory.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.replication.memory.limit.env" }} +- name: REPLICATION_ORCHESTRATOR_MEMORY_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: REPLICATION_ORCHESTRATOR_MEMORY_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.replication.memory.request value +*/}} +{{- define "airbyte.workloads.resources.replication.memory.request" }} + {{- .Values.global.workloads.resources.replication.memory.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.replication.memory.request environment variable +*/}} +{{- define "airbyte.workloads.resources.replication.memory.request.env" }} +- name: REPLICATION_ORCHESTRATOR_MEMORY_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: REPLICATION_ORCHESTRATOR_MEMORY_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.sidecar.cpu.limit value +*/}} +{{- define "airbyte.workloads.resources.sidecar.cpu.limit" }} + {{- .Values.global.workloads.resources.sidecar.cpu.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.sidecar.cpu.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.sidecar.cpu.limit.env" }} +- name: SIDECAR_MAIN_CONTAINER_CPU_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SIDECAR_MAIN_CONTAINER_CPU_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.sidecar.cpu.request value +*/}} +{{- define "airbyte.workloads.resources.sidecar.cpu.request" }} + {{- .Values.global.workloads.resources.sidecar.cpu.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.sidecar.cpu.request environment variable +*/}} +{{- define "airbyte.workloads.resources.sidecar.cpu.request.env" }} +- name: SIDECAR_MAIN_CONTAINER_CPU_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SIDECAR_MAIN_CONTAINER_CPU_REQUEST +{{- end }} + +{{/* +Renders the global.workloads.resources.sidecar.memory.limit value +*/}} +{{- define "airbyte.workloads.resources.sidecar.memory.limit" }} + {{- .Values.global.workloads.resources.sidecar.memory.limit | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.sidecar.memory.limit environment variable +*/}} +{{- define "airbyte.workloads.resources.sidecar.memory.limit.env" }} +- name: SIDECAR_MAIN_CONTAINER_MEMORY_LIMIT + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SIDECAR_MAIN_CONTAINER_MEMORY_LIMIT +{{- end }} + +{{/* +Renders the global.workloads.resources.sidecar.memory.request value +*/}} +{{- define "airbyte.workloads.resources.sidecar.memory.request" }} + {{- .Values.global.workloads.resources.sidecar.memory.request | default "" }} +{{- end }} + +{{/* +Renders the workloads.resources.sidecar.memory.request environment variable +*/}} +{{- define "airbyte.workloads.resources.sidecar.memory.request.env" }} +- name: SIDECAR_MAIN_CONTAINER_MEMORY_REQUEST + valueFrom: + configMapKeyRef: + name: {{ .Release.Name }}-airbyte-env + key: SIDECAR_MAIN_CONTAINER_MEMORY_REQUEST +{{- end }} + +{{/* +Renders the set of all 
workloads.resources environment variables +*/}} +{{- define "airbyte.workloads.resources.envs" }} +{{- include "airbyte.workloads.resources.useConnectorResourceDefaults.env" . }} +{{- include "airbyte.workloads.resources.mainContainer.cpu.limit.env" . }} +{{- include "airbyte.workloads.resources.mainContainer.cpu.request.env" . }} +{{- include "airbyte.workloads.resources.mainContainer.memory.limit.env" . }} +{{- include "airbyte.workloads.resources.mainContainer.memory.request.env" . }} +{{- include "airbyte.workloads.resources.check.cpu.limit.env" . }} +{{- include "airbyte.workloads.resources.check.cpu.request.env" . }} +{{- include "airbyte.workloads.resources.check.memory.limit.env" . }} +{{- include "airbyte.workloads.resources.check.memory.request.env" . }} +{{- include "airbyte.workloads.resources.discover.cpu.limit.env" . }} +{{- include "airbyte.workloads.resources.discover.cpu.request.env" . }} +{{- include "airbyte.workloads.resources.discover.memory.limit.env" . }} +{{- include "airbyte.workloads.resources.discover.memory.request.env" . }} +{{- include "airbyte.workloads.resources.fileTransfer.storage.limit.env" . }} +{{- include "airbyte.workloads.resources.fileTransfer.storage.request.env" . }} +{{- include "airbyte.workloads.resources.replication.cpu.limit.env" . }} +{{- include "airbyte.workloads.resources.replication.cpu.request.env" . }} +{{- include "airbyte.workloads.resources.replication.memory.limit.env" . }} +{{- include "airbyte.workloads.resources.replication.memory.request.env" . }} +{{- include "airbyte.workloads.resources.sidecar.cpu.limit.env" . }} +{{- include "airbyte.workloads.resources.sidecar.cpu.request.env" . }} +{{- include "airbyte.workloads.resources.sidecar.memory.limit.env" . }} +{{- include "airbyte.workloads.resources.sidecar.memory.request.env" . }} +{{- end }} + +{{/* +Renders the set of all workloads.resources config map variables +*/}} +{{- define "airbyte.workloads.resources.configVars" }} +CONNECTOR_SPECIFIC_RESOURCE_DEFAULTS_ENABLED: {{ include "airbyte.workloads.resources.useConnectorResourceDefaults" . | quote }} +JOB_MAIN_CONTAINER_CPU_LIMIT: {{ include "airbyte.workloads.resources.mainContainer.cpu.limit" . | quote }} +JOB_MAIN_CONTAINER_CPU_REQUEST: {{ include "airbyte.workloads.resources.mainContainer.cpu.request" . | quote }} +JOB_MAIN_CONTAINER_MEMORY_LIMIT: {{ include "airbyte.workloads.resources.mainContainer.memory.limit" . | quote }} +JOB_MAIN_CONTAINER_MEMORY_REQUEST: {{ include "airbyte.workloads.resources.mainContainer.memory.request" . | quote }} +CHECK_JOB_MAIN_CONTAINER_CPU_LIMIT: {{ include "airbyte.workloads.resources.check.cpu.limit" . | quote }} +CHECK_JOB_MAIN_CONTAINER_CPU_REQUEST: {{ include "airbyte.workloads.resources.check.cpu.request" . | quote }} +CHECK_JOB_MAIN_CONTAINER_MEMORY_LIMIT: {{ include "airbyte.workloads.resources.check.memory.limit" . | quote }} +CHECK_JOB_MAIN_CONTAINER_MEMORY_REQUEST: {{ include "airbyte.workloads.resources.check.memory.request" . | quote }} +DISCOVER_JOB_MAIN_CONTAINER_CPU_LIMIT: {{ include "airbyte.workloads.resources.discover.cpu.limit" . | quote }} +DISCOVER_JOB_MAIN_CONTAINER_CPU_REQUEST: {{ include "airbyte.workloads.resources.discover.cpu.request" . | quote }} +DISCOVER_JOB_MAIN_CONTAINER_MEMORY_LIMIT: {{ include "airbyte.workloads.resources.discover.memory.limit" . | quote }} +DISCOVER_JOB_MAIN_CONTAINER_MEMORY_REQUEST: {{ include "airbyte.workloads.resources.discover.memory.request" . 
| quote }} +FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT: {{ include "airbyte.workloads.resources.fileTransfer.storage.limit" . | quote }} +FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST: {{ include "airbyte.workloads.resources.fileTransfer.storage.request" . | quote }} +REPLICATION_ORCHESTRATOR_CPU_LIMIT: {{ include "airbyte.workloads.resources.replication.cpu.limit" . | quote }} +REPLICATION_ORCHESTRATOR_CPU_REQUEST: {{ include "airbyte.workloads.resources.replication.cpu.request" . | quote }} +REPLICATION_ORCHESTRATOR_MEMORY_LIMIT: {{ include "airbyte.workloads.resources.replication.memory.limit" . | quote }} +REPLICATION_ORCHESTRATOR_MEMORY_REQUEST: {{ include "airbyte.workloads.resources.replication.memory.request" . | quote }} +SIDECAR_MAIN_CONTAINER_CPU_LIMIT: {{ include "airbyte.workloads.resources.sidecar.cpu.limit" . | quote }} +SIDECAR_MAIN_CONTAINER_CPU_REQUEST: {{ include "airbyte.workloads.resources.sidecar.cpu.request" . | quote }} +SIDECAR_MAIN_CONTAINER_MEMORY_LIMIT: {{ include "airbyte.workloads.resources.sidecar.memory.limit" . | quote }} +SIDECAR_MAIN_CONTAINER_MEMORY_REQUEST: {{ include "airbyte.workloads.resources.sidecar.memory.request" . | quote }} +{{- end }} diff --git a/charts/v2/airbyte/templates/env-configmap.yaml b/charts/v2/airbyte/templates/env-configmap.yaml index e58420bbe0e..7b6986cea71 100644 --- a/charts/v2/airbyte/templates/env-configmap.yaml +++ b/charts/v2/airbyte/templates/env-configmap.yaml @@ -9,149 +9,45 @@ metadata: labels: {{- include "airbyte.labels" . | nindent 4 }} data: - AIRBYTE_VERSION: {{ .Values.version | default .Chart.AppVersion }} - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - AIRBYTE_EDITION: "pro" - {{- else }} - AIRBYTE_EDITION: "community" - {{- end }} - - {{- if $airbyteYmlDict }} - AIRBYTE_URL: {{ (index $airbyteYmlDict "webapp-url") | quote }} - {{- else}} - AIRBYTE_URL: {{ .Values.global.airbyteUrl | quote }} - {{- end }} - - AIRBYTE_SERVER_HOST: {{ .Release.Name }}-airbyte-server-svc:{{ .Values.server.service.port }} - API_URL: {{ .Values.webapp.api.url }} - CONNECTOR_BUILDER_API_URL: {{ index .Values.webapp "connector-builder-server" "url" | quote }} - CONFIG_API_HOST: http://{{ .Release.Name }}-airbyte-server-svc:{{ .Values.server.service.port }} # temporary solution for oss kube deploys for airbyte api server until that server is wrapped into the config server - CONFIG_ROOT: /configs - CONFIGS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION: "0.35.15.001" - DATA_DOCKER_MOUNT: airbyte_data - DB_DOCKER_MOUNT: airbyte_db + {{- include "airbyte.auth.bootstrap.configVars" . | nindent 2 }} + {{- include "airbyte.auth.identityProvider.configVars" . | nindent 2 }} + {{- include "airbyte.auth.instanceAdmin.enterprise.configVars" . | nindent 2 }} + {{- include "airbyte.auth.security.configVars" . | nindent 2 }} + {{- include "airbyte.common.configVars" . | nindent 2 }} + {{- include "airbyte.connector.configVars" . | nindent 2 }} + {{- include "airbyte.cron.configVars" . | nindent 2 }} {{- include "airbyte.database.configVars" . | nindent 2 }} - KEYCLOAK_DATABASE_URL: {{ include "airbyte.keycloak.database.url" . | quote }} - GOOGLE_APPLICATION_CREDENTIALS: {{ include "airbyte.gcpLogCredentialsPath" . 
| quote }} - INTERNAL_API_HOST: http://{{ .Release.Name }}-airbyte-server-svc:{{ .Values.server.service.port }} - WORKLOAD_API_HOST: http://{{ .Release.Name }}-workload-api-server-svc:{{ .Values.workloadApiServer.service.port }} - - - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - INITIAL_USER_FIRST_NAME: {{ .Values.global.auth.instanceAdmin.firstName }} - INITIAL_USER_LAST_NAME: {{ .Values.global.auth.instanceAdmin.lastName }} - {{- if .Values.global.auth.identityProvider }} - IDENTITY_PROVIDER_TYPE: {{ .Values.global.auth.identityProvider.type }} - OIDC_DOMAIN: {{ .Values.global.auth.identityProvider.oidc.domain }} - OIDC_APP_NAME: {{ .Values.global.auth.identityProvider.oidc.appName }} - {{- end }} - KEYCLOAK_INTERNAL_HOST: {{ .Release.Name }}-airbyte-keycloak-svc:{{ .Values.keycloak.service.port }} - KEYCLOAK_PORT: {{ .Values.keycloak.service.port | quote }} - - {{- if $airbyteYmlDict }} - KEYCLOAK_HOSTNAME_URL: {{ printf "%s/auth" (index $airbyteYmlDict "webapp-url") | quote }} - {{- else }} - KEYCLOAK_HOSTNAME_URL: {{ printf "%s/auth" .Values.global.airbyteUrl | quote }} - {{- end }} - - KEYCLOAK_JAVA_OPTS_APPEND: -Djgroups.dns.query={{ .Release.Name }}-airbyte-keycloak-headless-svc - {{- else }} - KEYCLOAK_INTERNAL_HOST: localhost # just a placeholder so that nginx template is valid - shouldn't be used when edition isn't "pro" - {{- end }} - - CONNECTOR_BUILDER_API_HOST: {{ .Release.Name }}-airbyte-connector-builder-server-svc:{{ .Values.connectorBuilderServer.service.port }} - - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - {{- if $airbyteYmlDict }} - AIRBYTE_API_HOST: {{ printf "%s/api/public" (index $airbyteYmlDict "webapp-url") | quote }} - {{- else }} - AIRBYTE_API_HOST: {{ printf "%s/api/public" .Values.global.airbyteUrl | quote }} - {{- end }} - {{- else if (eq .Values.global.edition "community")}} - AIRBYTE_API_HOST: http://localhost:{{ .Values.server.service.port }}/api/public - {{- else }} - AIRBYTE_API_HOST: http://{{ .Release.Name }}-airbyte-server-svc:{{ .Values.server.service.port }}/api/public - {{- end }} - - {{- if $.Values.global.jobs.kube.annotations }} - JOB_KUBE_ANNOTATIONS: {{ $.Values.global.jobs.kube.annotations | include "airbyte.flattenMap" | quote }} - {{- end }} - - {{- if $.Values.global.jobs.kube.labels }} - JOB_KUBE_LABELS: {{ $.Values.global.jobs.kube.labels | include "airbyte.flattenMap" | quote }} - {{- end }} - - {{- if $.Values.global.jobs.kube.nodeSelector }} - JOB_KUBE_NODE_SELECTORS: {{ $.Values.global.jobs.kube.nodeSelector | include "airbyte.flattenMap" | quote }} - {{- end }} - - {{- if $.Values.global.jobs.kube.tolerations }} - JOB_KUBE_TOLERATIONS: {{ $.Values.global.jobs.kube.tolerations | include "airbyte.flattenArrayMap" | quote }} - {{- end }} - - JOB_KUBE_BUSYBOX_IMAGE: {{ include "imageUrl" (list $.Values.global.jobs.kube.images.busybox $) }} - JOB_KUBE_CURL_IMAGE: {{ include "imageUrl" (list $.Values.global.jobs.kube.images.curl $) }} - {{ $workloadLauncher := .Values.workloadLauncher }} - CONTAINER_ORCHESTRATOR_IMAGE: {{ include "imageUrl" (list $workloadLauncher.containerOrchestrator.image $) }} - WORKLOAD_INIT_IMAGE: {{ include "imageUrl" (list $workloadLauncher.workloadInit.image $) }} - CONNECTOR_SIDECAR_IMAGE: {{ include "imageUrl" (list $workloadLauncher.connectorSidecar.image $) }} - - JOB_MAIN_CONTAINER_CPU_LIMIT: {{ ((.Values.global.jobs.resources | default dict).limits | default dict).cpu | default "" | quote }} - 
JOB_MAIN_CONTAINER_CPU_REQUEST: {{ ((.Values.global.jobs.resources | default dict).requests | default dict).cpu | default "" | quote }} - JOB_MAIN_CONTAINER_MEMORY_LIMIT: {{ ((.Values.global.jobs.resources | default dict).limits | default dict).memory | default "" | quote }} - JOB_MAIN_CONTAINER_MEMORY_REQUEST: {{ ((.Values.global.jobs.resources | default dict).requests | default dict).memory | default "" | quote }} - - {{- if $.Values.global.jobs.kube.main_container_image_pull_secret }} - JOB_KUBE_MAIN_CONTAINER_IMAGE_PULL_SECRET: {{ $.Values.global.jobs.kube.main_container_image_pull_secret }} - {{- end }} - JOBS_DATABASE_MINIMUM_FLYWAY_MIGRATION_VERSION: "0.29.15.001" - LOCAL_ROOT: /tmp/airbyte_local - LOG_LEVEL: {{ default "INFO" (((.Values.global).logging).level) }} - MICROMETER_METRICS_ENABLED: "false" - MICROMETER_METRICS_STATSD_FLAVOR: "datadog" - RUN_DATABASE_MIGRATION_ON_STARTUP: {{ .Values.airbyteBootloader.runDatabaseMigrationsOnStartup | default "true" | quote }} - SEGMENT_WRITE_KEY: 7UDdp5K55CyiGgsauOr2pNNujGvmhaeu - # Storage start - STORAGE_TYPE: {{ (((.Values.global).storage).type) | default "minio" | quote }} - STORAGE_BUCKET_ACTIVITY_PAYLOAD: {{ ((((.Values.global).storage).bucket).activityPayload) | default "airbyte-storage" | quote }} - STORAGE_BUCKET_LOG: {{ ((((.Values.global).storage).bucket).log) | default "airbyte-storage" | quote }} - STORAGE_BUCKET_STATE: {{ ((((.Values.global).storage).bucket).state) | default "airbyte-storage" | quote }} - STORAGE_BUCKET_WORKLOAD_OUTPUT: {{ ((((.Values.global).storage).bucket).workloadOutput) | default "airbyte-storage" | quote }} - - {{- with .Values.global.storage.s3 }} - AWS_DEFAULT_REGION: {{ .region | default "" | quote }} - {{- end}} - - MINIO_ENDPOINT: {{ include "airbyte.storage.minio.endpoint" . | quote }} - S3_PATH_STYLE_ACCESS: {{ include "airbyte.s3PathStyleAccess" . | quote }} - # Storage end - STATSD_HOST: "localhost" - STATSD_PORT: "8125" - TEMPORAL_HOST: {{ .Release.Name }}-temporal:{{ .Values.temporal.service.port }} - TEMPORAL_WORKER_PORTS: 9001,9002,9003,9004,9005,9006,9007,9008,9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,9022,9023,9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039,9040 - TRACKING_STRATEGY: segment - WEBAPP_URL: {{ .Values.webapp.url | default (printf "http://%s-airbyte-webapp-svc:%d" .Release.Name (.Values.webapp.service.port | int)) }} - WORKER_ENVIRONMENT: kubernetes - WORKSPACE_DOCKER_MOUNT: airbyte_workspace - WORKSPACE_ROOT: /workspace - METRIC_CLIENT: {{ .Values.global.metrics.metricClient | default "" | quote }} - OTEL_COLLECTOR_ENDPOINT: {{ .Values.global.metrics.otelCollectorEndpoint | default "" | quote }} - ACTIVITY_MAX_ATTEMPT: {{ .Values.worker.activityMaxAttempt | default "" | quote }} - ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS: {{ .Values.worker.activityInitialDelayBetweenAttemptsSeconds | default "" | quote }} - ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS: {{ .Values.worker.activityMaxDelayBetweenAttemptsSeconds | default "" | quote }} - WORKFLOW_FAILURE_RESTART_DELAY_SECONDS: "" - FILE_TRANSFER_EPHEMERAL_STORAGE_LIMIT: 5G - FILE_TRANSFER_EPHEMERAL_STORAGE_REQUEST: 5G + {{- include "airbyte.database.migrations.configVars" . | nindent 2 }} + {{- include "airbyte.datadog.configVars" . | nindent 2 }} + {{- include "airbyte.featureFlags.configVars" . | nindent 2 }} + {{- include "airbyte.java.configVars" . | nindent 2 }} + {{- include "airbyte.jobs.configVars" . 
| nindent 2 }} + {{- include "airbyte.keycloak.admin.client.configVars" . | nindent 2 }} + {{- include "airbyte.keycloak.configVars" . | nindent 2 }} + {{- include "airbyte.keycloak.database.configVars" . | nindent 2 }} + {{- include "airbyte.logging.configVars" . | nindent 2 }} + {{- include "airbyte.metrics.configVars" . | nindent 2 }} + {{- include "airbyte.micronaut.configVars" . | nindent 2 }} + {{- include "airbyte.otel.configVars" . | nindent 2 }} + {{- include "airbyte.secretsManager.configVars" . | nindent 2 }} + {{- include "airbyte.storage.configVars" . | nindent 2 }} + {{- include "airbyte.temporal.cli.configVars" . | nindent 2 }} + {{- include "airbyte.temporal.cloud.configVars" . | nindent 2 }} + {{- include "airbyte.temporal.configVars" . | nindent 2 }} + {{- include "airbyte.temporal.sdk.configVars" . | nindent 2 }} + {{- include "airbyte.temporal.worker.configVars" . | nindent 2 }} + {{- include "airbyte.topology.configVars" . | nindent 2 }} + {{- include "airbyte.tracking.configVars" . | nindent 2 }} + {{- include "airbyte.webapp.configVars" . | nindent 2 }} + {{- include "airbyte.worker.configVars" . | nindent 2 }} + {{- include "airbyte.workloadApiServer.configVars" . | nindent 2 }} + {{- include "airbyte.workloadLauncher.configVars" . | nindent 2 }} + {{- include "airbyte.workloadLauncher.images.configVars" . | nindent 2 }} + {{- include "airbyte.workloads.configVars" . | nindent 2 }} + {{- include "airbyte.workloads.queues.configVars" . | nindent 2 }} + {{- include "airbyte.workloads.resources.configVars" . | nindent 2 }} LAUNCHER_MICRONAUT_ENVIRONMENTS: "control-plane,oss" WORKERS_MICRONAUT_ENVIRONMENTS: "control-plane" CRON_MICRONAUT_ENVIRONMENTS: "control-plane" SERVER_MICRONAUT_ENVIRONMENTS: "control-plane" - SHOULD_RUN_NOTIFY_WORKFLOWS: "true" - MAX_NOTIFY_WORKERS: {{ .Values.worker.maxNotifyWorkers | default "5" | quote }} - KUBERNETES_CLIENT_MAX_IDLE_CONNECTIONS: "" - WORKLOAD_LAUNCHER_PARALLELISM: "10" - CONNECTOR_BUILDER_SERVER_API_HOST: http://{{ .Release.Name }}-airbyte-connector-builder-server-svc:{{ .Values.connectorBuilderServer.service.port }} - PUB_SUB_ENABLED: "false" - PUB_SUB_TOPIC_NAME: "" - ENTERPRISE_SOURCE_STUBS_URL: {{ .Values.server.env_vars.ENTERPRISE_SOURCE_STUBS_URL | default "https://connectors.airbyte.com/files/resources/connector_stubs/v0/connector_stubs.json" }} diff --git a/charts/v2/airbyte/templates/minio.yaml b/charts/v2/airbyte/templates/minio.yaml index a47ea4e21ce..0f9e9835ae0 100644 --- a/charts/v2/airbyte/templates/minio.yaml +++ b/charts/v2/airbyte/templates/minio.yaml @@ -38,26 +38,16 @@ spec: - server - /storage env: - # Minio access key and secret key. This must match the S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY declared in /dev/.env. - - name: MINIO_ROOT_USER - valueFrom: - secretKeyRef: - name: {{ .Release.Name }}-airbyte-secrets - key: MINIO_ACCESS_KEY_ID - - name: MINIO_ROOT_PASSWORD - valueFrom: - secretKeyRef: - name: {{ .Release.Name }}-airbyte-secrets - key: MINIO_SECRET_ACCESS_KEY + {{- include "airbyte.minio.envs" . 
| nindent 12 }} ports: - containerPort: 9000 resources: requests: - memory: "1024Mi" - cpu: "200m" + memory: {{ .Values.minio.resources.requests.memory }} + cpu: {{ .Values.minio.resources.requests.cpu }} limits: - memory: "1024Mi" - cpu: "200m" + memory: {{ .Values.minio.resources.limits.memory }} + cpu: {{ .Values.minio.resources.limits.cpu }} # Mount the volume into the pod securityContext: allowPrivilegeEscalation: false @@ -153,33 +143,6 @@ spec: seccompProfile: type: RuntimeDefault env: - {{- if and (eq (lower (default "" .Values.global.storage.type)) "minio") (((.Values.global).storage).minio) }} - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.storage.storageSecretName }} - key: {{ include "airbyte.minioAccessKeyIdSecretKey" .Values.global.storage.minio.accessKeyIdSecretKey }} - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - name: {{ include "airbyte.secretStoreName" .Values.global.storage.storageSecretName }} - key: {{ include "airbyte.minioSecretAccessKeySecretKey" .Values.global.storage.minio.secretAccessKeySecretKey }} - {{- else }} - # this is for the internally deployed minio - - name: MINIO_ACCESS_KEY - valueFrom: - secretKeyRef: - name: {{ .Release.Name }}-airbyte-secrets - key: MINIO_ACCESS_KEY_ID - - name: MINIO_SECRET_KEY - valueFrom: - secretKeyRef: - name: {{ .Release.Name }}-airbyte-secrets - key: MINIO_SECRET_ACCESS_KEY - {{- end }} - - name: MINIO_ENDPOINT - valueFrom: - configMapKeyRef: - name: {{ .Release.Name }}-airbyte-env - key: MINIO_ENDPOINT + {{- include "airbyte.minio.envs" . | nindent 8 }} + {{- include "airbyte.storage.minio.endpoint.env" . | nindent 8 }} {{- end }} diff --git a/charts/v2/airbyte/templates/secret.yaml b/charts/v2/airbyte/templates/secret.yaml index 29ce0ea358b..80603df944b 100644 --- a/charts/v2/airbyte/templates/secret.yaml +++ b/charts/v2/airbyte/templates/secret.yaml @@ -9,10 +9,17 @@ metadata: {{- include "airbyte.labels" . | nindent 4 }} type: Opaque stringData: + {{- include "airbyte.auth.bootstrap.secrets" . | nindent 2 }} + {{- include "airbyte.auth.identityProvider.secrets" . | nindent 2 }} + {{- include "airbyte.auth.instanceAdmin.enterprise.secrets" . | nindent 2 }} + {{- include "airbyte.customerio.secrets" . | nindent 2 }} {{- include "airbyte.database.secrets" . | nindent 2 }} + {{- include "airbyte.enterprise.secrets" . | nindent 2 }} + {{- include "airbyte.featureFlags.secrets" . | nindent 2 }} + {{- include "airbyte.keycloak.admin.user.secrets" . | nindent 2 }} + {{- include "airbyte.keycloak.database.secrets" . | nindent 2 }} + {{- include "airbyte.secretsManager.secrets" . | nindent 2 }} {{- include "airbyte.storage.secrets" . | nindent 2 }} - {{- if or (eq .Values.global.edition "pro") (eq .Values.global.edition "enterprise") }} - KEYCLOAK_ADMIN_USER: {{ .Values.keycloak.auth.adminUsername | quote }} - KEYCLOAK_ADMIN_PASSWORD: {{ .Values.keycloak.auth.adminPassword | quote }} - {{- end }} - WORKLOAD_API_BEARER_TOKEN: {{ index ".Values.workloadApi.bearerToken" | quote }} + {{- include "airbyte.temporal.cli.secrets" . | nindent 2 }} + {{- include "airbyte.temporal.cloud.secrets" . | nindent 2 }} + {{- include "airbyte.workloadApiServer.secrets" . 
| nindent 2 }} diff --git a/charts/v2/airbyte/values.yaml b/charts/v2/airbyte/values.yaml index 05a2d83e772..3a0a174d47d 100644 --- a/charts/v2/airbyte/values.yaml +++ b/charts/v2/airbyte/values.yaml @@ -1,7 +1,3 @@ -### TEST FOR RELEASE WORKFLOW - - -# Global params that are overwritten with umbrella chart global: # -- Service Account name override serviceAccountName: &service-account-name "airbyte-admin" @@ -10,13 +6,16 @@ global: # -- Edition; "community" or "pro" edition: "community" - airbyteYml: "" + local: false + + cluster: + type: "control-plane" # or "data-plane" enterprise: # -- Secret name where an Airbyte license key is stored - secretName: "airbyte-config-secrets" + secretName: "" # -- The key within `licenseKeySecretName` where the Airbyte license key is stored - licenseKeySecretKey: "license-key" + licenseKeySecretKey: "" # -- The URL where Airbyte will be reached; This should match your Ingress host airbyteUrl: "" @@ -36,39 +35,56 @@ global: auth: # -- Whether auth is enabled enabled: false + # -- Admin user configuration instanceAdmin: # -- Secret name where the instanceAdmin configuration is stored - secretName: "airbyte-config-secrets" + secretName: "airbyte-auth-secrets" # -- The first name of the initial user firstName: "" # -- The last name of the initial user lastName: "" # -- The key within `emailSecretName` where the initial user's email is stored - emailSecretKey: "instance-admin-email" + emailSecretKey: "" # -- The key within `passwordSecretName` where the initial user's password is stored - passwordSecretKey: "instance-admin-password" + passwordSecretKey: "" # -- SSO Identify Provider configuration; (requires Enterprise) - #identityProvider: - # # -- Secret name where the OIDC configuration is stored - # secretName: "airbyte-config-secrets" - # # -- The identity provider type (e.g. oidc) - # type: "" - # # -- OIDC configuration (required if `auth.identityProvider.type` is "oidc") - # oidc: - # # -- OIDC application domain - # domain: "" - # # -- OIDC application name - # appName: "" - # # -- The key within `clientIdSecretName` where the OIDC client id is stored - # clientIdSecretKey: "client-id" - # # -- The key within `clientSecretSecretName` where the OIDC client secret is stored - # clientSecretSecretKey: "client-secret" + identityProvider: + # -- Secret name where the OIDC configuration is stored + secretName: "" + # -- The identity provider type (e.g. oidc) + type: "oidc" + + # -- OIDC configuration (required if `auth.identityProvider.type` is "oidc") + oidc: + # -- OIDC application domain + domain: "" + # -- OIDC application name + appName: "" + # -- The key within `clientIdSecretName` where the OIDC client id is stored + clientIdSecretKey: "" + # -- The key within `clientSecretSecretName` where the OIDC client secret is stored + clientSecretSecretKey: "" + + # -- Security configuration + security: + secretName: "" + + cookieSecureSetting: true + cookieSameSiteSetting: "Strict" + + jwtSignatureSecretKey: "" + api: + authHeaderName: "X-Airbyte-Auth" + # -- Environment variables env_vars: {} + # -- Secrets + secrets: {} + # -- Database configuration database: type: "internal" # "external" @@ -95,45 +111,183 @@ global: # -- The key within `secretName` where the password is stored #passwordSecretKey: "" # e.g."database-password" + migrations: + runAtStartup: true + + configDb: + minimumFlywayMigrationVersion: "" + + jobsDb: + minimumFlywayMigrationVersion: "" + storage: # -- The storage backend type. 
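
A minimal sketch of how the generated helpers above compose, assuming a Helm release named "airbyte" (the release name is an assumption for illustration; the default of 5 for worker.maxSyncWorkers comes from the helper shown earlier): each setting gets a value helper, a configVars entry rendered into the <release>-airbyte-env ConfigMap (or the <release>-airbyte-secrets Secret for sensitive keys), and a matching .env helper that workload containers include to read the value back.

  # ConfigMap data entry rendered by "airbyte.worker.configVars"
  MAX_SYNC_WORKER: "5"

  # container env entry rendered by "airbyte.worker.maxSyncWorkers.env"
  - name: MAX_SYNC_WORKER
    valueFrom:
      configMapKeyRef:
        name: airbyte-airbyte-env
        key: MAX_SYNC_WORKER
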
Supports s3, gcs, azure, minio (default) type: minio # default storage used # -- Secret name where storage provider credentials are stored #storageSecretName: "airbyte-config-secrets" + # Minio + minio: + accessKeyId: minio + secretAccessKey: "minio123" + endpoint: "http://airbyte-minio-svc:9000" + + bucket: + log: airbyte-bucket + state: airbyte-bucket + workloadOutput: airbyte-bucket + activityPayload: airbyte-bucket + # S3 - #bucket: ## S3 bucket names that you've created. We recommend storing the following all in one bucket. - # log: airbyte-bucket - # state: airbyte-bucket - # workloadOutput: airbyte-bucket #s3: # region: "" ## e.g. us-east-1 # authenticationType: credentials ## Use "credentials" or "instanceProfile" + # accessKeyId: "" + # secretAccessKey: "" # GCS - #bucket: ## GCS bucket names that you've created. We recommend storing the following all in one bucket. - # log: airbyte-bucket - # state: airbyte-bucket - # workloadOutput: airbyte-bucket #gcs: # projectId: # credentialsJson: /secrets/gcs-log-creds/gcp.json # Azure - #bucket: ## Azure Blob Storage container names that you've created. We recommend storing the following all in one bucket. - # log: airbyte-bucket - # state: airbyte-bucket - # workloadOutput: airbyte-bucket #azure: # # one of the following: connectionString, connectionStringSecretKey # connectionString: # connectionStringSecretKey: + secretsManager: + enabled: false + type: "" # one of: VAULT, GOOGLE_SECRET_MANAGER, AWS_SECRET_MANAGER, AZURE_KEY_VAULT, TESTING_CONFIG_DB_TABLE + #secretName: "airbyte-config-secrets" + + #awsSecretManager: + # region: + # authenticationType: credentials ## Use "credentials" or "instanceProfile" + # tags: ## Optional - You may add tags to new secrets created by Airbyte. + # - key: ## e.g. team + # value: ## e.g. deployments + # - key: business-unit + # value: engineering + # kms: ## Optional - ARN for KMS Decryption. + + #googleSecretManager: + # projectId: + # credentialsSecretKey: gcp.json + + #azureKeyVault: + # tenantId: "" + # vaultUrl: "" + # clientId: "" + # clientIdSecretKey: "" + # clientSecret: "" + # clientSecretSecretKey: "" + # tags: "" + + #vault: + + logging: + level: info + httpAccessLogsEnabled: false + log4jConfigFile: "" + + email: + enabled: false + secretName: "" + client: "" # one of: sengrid + + sendgrid: + apiKey: "" + apiKeySecretKey: "" + + connectorRegistry: + seedProvider: "local" + + workloads: + secretName: "" + + images: + workloadInit: "" + connectorSideCar: "" + containerOrchestrator: "" + + api: + bearerToken: "token" + bearerTokenSecretName: "" + bearerTokenSecretKey: "" + + containerOrchestrator: + javaOpts: [] + secretMountPath: "" + + queues: + check: [] + discover: [] + sync: [] + + pubSub: + enabled: false + topicName: "" + + resources: + useConnectorResourceDefaults: true + + mainContainer: + cpu: {} + memory: {} + check: + cpu: {} + memory: {} + discover: + cpu: {} + memory: {} + replication: + cpu: {} + memory: {} + sidecar: + cpu: {} + memory: {} + + fileTransfer: + storage: + request: 5G + limit: 5G + + featureFlags: + secretName: "" + client: "configfile" + + configfile: {} + + #launchdarkly: + # key: "" + + java: + opts: [] + metrics: # -- The metric client to configure globally. 
Supports "otel" metricClient: "" - # -- The open-telemetry-collector endpoint that metrics will be sent to - otelCollectorEndpoint: "" + + micrometer: + enabled: false + statsdFlavor: "datadog" # datadog + + otel: + resourceAttributes: {} + + collector: + endpoint: "" + + exporter: + name: "otlp" + protocol: "grpc" + timeout: "30000" + metricExportInterval: "10000" + + statsd: + host: "" + port: "" # Jobs resource requests and limits, see http://kubernetes.io/docs/user-guide/compute-resources/ # We usually recommend not to specify default resources and to leave this as a conscious @@ -179,6 +333,9 @@ global: # -- image pull secret to use for job pod main_container_image_pull_secret: "" + localVolume: + enabled: false + images: ## JOB_KUBE_BUSYBOX_IMAGE # -- busybox image used by the job pod @@ -187,6 +344,90 @@ global: # -- curl image used by the job pod curl: "curlimages/curl:8.1.1" + topology: + nodeSelectorLabel: "airbyte/node-pool" + nodeSelectors: + mainNodePool: main + jobsNodePool: jobs + quickJobsNodePool: quick-jobs + + temporal: + secretName: "" + + cli: + address: "" + namespace: "" + tlsCert: "" + tlsCertSecretKey: "TEMPORAL_CLOUD_CLIENT_CERT" + tlsKey: "" + tlsKeySecretKey: "TEMPORAL_CLOUD_CLIENT_KEY" + + cloud: + enabled: false + host: "" + namespace: "" + clientCert: "" + clientCertSecretKey: "TEMPORAL_CLOUD_CLIENT_CERT" + clientKey: "" + clientKeySecretKey: "TEMPORAL_CLOUD_CLIENT_KEY" + + sdk: + rpc: + timeout: "120s" + longPollTimeout: "140s" + queryTimeout: "20s" + + datadog: + enabled: false + env: dev + traceAgentPort: 8126 + + statsd: + port: 8125 + + integrations: + dbm: + enabled: false + propagationMode: "full" + + grpc: + enabled: false + clientEnabled: false + serverEnabled: false + + googleHttpClient: + enabled: false + + httpUrlConnection: + enabled: false + + urlConnection: + enabled: false + + netty: + enabled: false + + netty41: + enabled: false + + customerio: + secretName: "" + apiKeySecretKey: "" + + tracking: + enabled: true + secretName: "" + strategy: "segment" # one of: logging, segment + + segment: + writeKey: "" + writeKeySecretKey: "" + + micronaut: + environments: [] + + extraSelectorLabels: {} + ## @section Common Parameters # -- String to partially override airbyte.fullname template with a string (will prepend the release name) @@ -231,16 +472,13 @@ webapp: # -- Security context for the container podSecurityContext: - # gid=101(nginx) - fsGroup: 101 + fsGroup: 1000 containerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # uid=101(nginx) - runAsUser: 101 - # gid=101(nginx) - runAsGroup: 101 + runAsUser: 1000 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] @@ -342,7 +580,7 @@ webapp: # -- The webapp API url url: /api/v1/ - connector-builder-server: + connectorBuilderServer: url: /connector-builder-api fullstory: @@ -460,16 +698,13 @@ podSweeper: # -- Security context for the container podSecurityContext: - # gid=1001(anon) - fsGroup: 1001 + fsGroup: 1000 containerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # uid=1001(anon) - runAsUser: 1001 - # gid=1001(anon) - runAsGroup: 1001 + runAsUser: 1000 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] @@ -751,6 +986,10 @@ server: # -- Supply extra env variables to main container using simplified notation env_vars: {} + debug: + enabled: false + remoteDebugPort: 5005 + ## @section Worker Parameters worker: @@ -1171,6 +1410,8 @@ connectorRolloutWorker: extraContainers: [] + env_vars: {} + hpa: enabled: 
false @@ -1436,7 +1677,6 @@ airbyteBootloader: extraVolumes: [] ## @section Temporal parameters -## TODO: Move to consuming temporal from a dedicated helm chart temporal: enabled: true @@ -1451,6 +1691,41 @@ temporal: # -- The temporal image tag to use tag: "1.23.0" + secretName: "" + + autoSetup: true + + database: + engine: "postgresql" + host: "" + port: "" + user: "" + userSecretKey: "" + password: "" + passwordSecretKey: "" + + host: "" + + cli: + address: "" + namespace: "" + tlsCert: "" + tlsKey: "" + + cloud: + namespace: + clientCert: "" + clientKey: "" + + sdk: + rpc: + timeout: "120s" + longPollTimeout: "140s" + queryTimeout: "20s" + + worker: + ports: "" + service: # -- The Kubernetes Service Type type: ClusterIP @@ -1562,6 +1837,8 @@ temporal: extraContainers: [] + env_vars: {} + ## @section Temporal parameters ## TODO: Move to consuming temporal from a dedicated helm chart @@ -1688,10 +1965,14 @@ temporalUi: extraContainers: [] + env_vars: {} + debug: enabled: false remoteDebugPort: 5005 + + ## @section Airbyte Database parameters # PostgreSQL chart configuration, see https://github.com/bitnami/charts/blob/master/bitnami/postgresql/values.yaml @@ -1744,32 +2025,18 @@ postgresql: # https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity affinity: {} -# External PostgreSQL configuration, All of these values are only used when postgresql.enabled is set to false -externalDatabase: - # -- Database host - host: "" - # -- non-root Username for Airbyte Database - user: "" - # -- Database password - password: "" - # -- Name of an existing secret resource containing the DB password - existingSecret: "" - # -- Name of an existing secret key containing the DB password - existingSecretPasswordKey: "" - # -- Database name - database: "" - # -- Database port number - port: "" - # -- Database full JDBL URL (ex: jdbc:postgresql://host:port/db?parameters) - jdbcUrl: "" - minio: + secretName: "" + image: # -- Minio image used by Minio helm chart repository: minio/minio # -- Minio tag image tag: RELEASE.2023-11-20T22-40-07Z + rootUser: "" + rootPassword: "" + mcImage: repository: airbyte/mc tag: latest @@ -1789,6 +2056,13 @@ minio: # https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity affinity: {} + resources: + requests: + memory: 1Gi + cpu: 250m + limits: + cpu: 300m + memory: 2Gi ## @section cron parameters cron: enabled: true @@ -1800,6 +2074,10 @@ cron: # -- The pull policy to use for the airbyte cron image pullPolicy: IfNotPresent + jobs: + updateDefinitions: + enabled: false + ## workloadApi: ## bearerTokenSecretName: workload-api-bearer-token-name kubernetes secret name where bearer token is stored ## bearerTokenSecretKey: workload-api-bearer-token-key kubernetes secret key where bearer token is stored @@ -2077,18 +2355,54 @@ keycloak: enabled: true env_vars: {} + secretName: "" + + url: "" + internalHost: + internalProtocol: "http" + internalBasePath: "/auth" + + clientRealm: "" + realmIssuer: "" + + database: + name: "" + host: "" + + user: "" + userSecretKey: "" + + password: "" + passwordSecretKey: "" + auth: adminUsername: airbyteAdmin - adminPassword: keycloak123 + adminUsernameSecretKey: "" + + adminPassword: airbyte123 + adminPasswordSecretKey: "" + + adminRealm: "" + adminRealmSecretKey: "" + + adminCliClientId: "" image: repository: airbyte/keycloak pullPolicy: IfNotPresent + service: + type: ClusterIP + port: 8180 + annotations: {} + + headlessService: + annotations: {} + # -- Security 
context for the container podSecurityContext: # gid=0(root) - fsGroup: 0 + fsGroup: 1000 initContainers: initDb: @@ -2097,10 +2411,8 @@ keycloak: initContainerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # uid=70(postgres) - runAsUser: 70 - # gid=70(postgres) - runAsGroup: 70 + runAsUser: 1000 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] @@ -2130,6 +2442,46 @@ keycloak: # https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity affinity: {} + ## Configure extra options for the keycloak containers' liveness and readiness probes + ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-probes/#configure-probes + ## livenessProbe.enabled Enable livenessProbe on keycloak + ## livenessProbe.initialDelaySeconds Initial delay seconds for livenessProbe + ## livenessProbe.periodSeconds Period seconds for livenessProbe + ## livenessProbe.timeoutSeconds Timeout seconds for livenessProbe + ## livenessProbe.failureThreshold Failure threshold for livenessProbe + ## livenessProbe.successThreshold Success threshold for livenessProbe + ## + livenessProbe: + enabled: true + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + + ## readinessProbe.enabled Enable readinessProbe on keycloak + ## readinessProbe.initialDelaySeconds Initial delay seconds for readinessProbe + ## readinessProbe.periodSeconds Period seconds for readinessProbe + ## readinessProbe.timeoutSeconds Timeout seconds for readinessProbe + ## readinessProbe.failureThreshold Failure threshold for readinessProbe + ## readinessProbe.successThreshold Success threshold for readinessProbe + ## + readinessProbe: + enabled: true + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 1 + failureThreshold: 3 + successThreshold: 1 + + startupProbe: + enabled: true + initialDelaySeconds: 30 + periodSeconds: 5 + timeoutSeconds: 1 + failureThreshold: 60 + successThreshold: 1 + keycloakSetup: enabled: true env_vars: {} @@ -2146,10 +2498,8 @@ keycloakSetup: initContainerSecurityContext: allowPrivilegeEscalation: false runAsNonRoot: true - # gid=100(curl_user) - runAsUser: 100 - # gid=101(curl_group) - runAsGroup: 101 + runAsUser: 1000 + runAsGroup: 1000 readOnlyRootFilesystem: false capabilities: drop: ["ALL"] @@ -2183,6 +2533,10 @@ keycloakSetup: # https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#affinity-and-anti-affinity affinity: {} + debug: + enabled: false + remoteDebugPort: 5005 + workloadApiServer: enabled: true diff --git a/deps.toml b/deps.toml index b8debc85915..96a3197a7f4 100644 --- a/deps.toml +++ b/deps.toml @@ -1,5 +1,5 @@ [versions] -airbyte-protocol = "0.14.0" +airbyte-protocol = "0.14.1" azure-kevault-secrets = "4.3.5" azure-identity = "1.3.5" azure-storage = "12.27.1" @@ -13,30 +13,29 @@ failsafe = "3.3.2" flyway = "10.10.0" glassfish_version = "2.31" grpc_version = "1.62.2" -hikaricp = "5.0.1" +hikaricp = "6.0.0" jax-rs = "3.1.0" jna = "5.8.0" jmh = "1.37" jooq = "3.19.7" -junit-jupiter = "5.10.3" +junit-jupiter = "5.11.3" keycloak = "25.0.2" # should be kept in sync with airbyte-keycloak/Dockerfile base image version kotest = "5.9.1" kotlin-logging = "5.1.0" kubernetes-client = "6.12.1" -logback = "1.5.8" -lombok = "1.18.34" -micronaut = "4.6.3" -micronaut-cache = "5.0.1" -micronaut-core = "4.6.6" -micronaut-data = "4.9.4" -micronaut-email = "2.6.0" -micronaut-jaxrs = "4.6.1" -micronaut-jdbc = "5.8.1" 
-micronaut-kotlin = "4.4.0" -micronaut-micrometer = "5.8.0" -micronaut-openapi = "6.12.3" -micronaut-security = "4.10.2" -micronaut-test = "4.5.0" +logback = "1.5.12" +micronaut = "4.7.1" +micronaut-cache = "5.1.0" +micronaut-core = "4.7.6" +micronaut-data = "4.10.4" +micronaut-email = "2.7.0" +micronaut-jaxrs = "4.7.2" +micronaut-jdbc = "6.0.2" +micronaut-kotlin = "4.5.0" +micronaut-micrometer = "5.9.3" +micronaut-openapi = "6.13.2" +micronaut-security = "4.11.2" +micronaut-test = "4.6.0" moshi = "1.15.0" mockito = "5.8.0" mockito-kotlin = "5.2.1" @@ -153,7 +152,6 @@ kubernetes-httpclient-okhttp = { module = "io.fabric8:kubernetes-httpclient-okht launchdarkly = { module = "com.launchdarkly:launchdarkly-java-server-sdk", version = "7.2.6" } logback-classic = { module = "ch.qos.logback:logback-classic", version.ref = "logback" } logback-core = { module = "ch.qos.logback:logback-core", version.ref = "logback" } -lombok = { module = "org.projectlombok:lombok", version.ref = "lombok" } micrometer-statsd = { module = "io.micrometer:micrometer-registry-statsd", version = "1.9.3" } moshi-kotlin = { module = "com.squareup.moshi:moshi-kotlin", version.ref = "moshi" } mockito-core = { module = "org.mockito:mockito-core", version.ref = "mockito" } @@ -223,7 +221,7 @@ micronaut-data-model = { module = "io.micronaut.data:micronaut-data-model", vers micronaut-data-tx = { module = "io.micronaut.data:micronaut-data-tx", version.ref = "micronaut-data" } micronaut-email = { module = "io.micronaut.email:micronaut-email", version.ref = "micronaut-email" } micronaut-email-sendgrid = { module = "io.micronaut.email:micronaut-email-sendgrid", version.ref = "micronaut-email" } -micronaut-flyway = { module = "io.micronaut.flyway:micronaut-flyway", version = "7.4.0" } +micronaut-flyway = { module = "io.micronaut.flyway:micronaut-flyway", version = "7.6.0" } micronaut-inject = { module = "io.micronaut:micronaut-inject", version.ref = "micronaut-core" } micronaut-http = { module = "io.micronaut:micronaut-http", version.ref = "micronaut-core" } micronaut-http-client = { module = "io.micronaut:micronaut-http-client", version.ref = "micronaut-core" } @@ -245,17 +243,17 @@ micronaut-micrometer-registry-datadog = { module = "io.micronaut.micrometer:micr micronaut-micrometer-registry-statsd = { module = "io.micronaut.micrometer:micronaut-micrometer-registry-statsd", version.ref = "micronaut-micrometer" } micronaut-openapi = { module = "io.micronaut.openapi:micronaut-openapi", version.ref = "micronaut-openapi" } micronaut-openapi-annotations = { module = "io.micronaut.openapi:micronaut-openapi-annotations", version.ref = "micronaut-openapi" } -micronaut-picocli = { module = "io.micronaut.picocli:micronaut-picocli", version = "5.5.0" } +micronaut-picocli = { module = "io.micronaut.picocli:micronaut-picocli", version = "5.6.0" } micronaut-platform = { module = "io.micronaut.platform:micronaut-platform", version.ref = "micronaut" } -micronaut-problem-json = { module = "io.micronaut.problem:micronaut-problem-json", version = "3.5.0" } -micronaut-redis-lettuce = { module = "io.micronaut.redis:micronaut-redis-lettuce", version = "6.6.0" } +micronaut-problem-json = { module = "io.micronaut.problem:micronaut-problem-json", version = "3.6.0" } +micronaut-redis-lettuce = { module = "io.micronaut.redis:micronaut-redis-lettuce", version = "6.6.1" } micronaut-runtime = { module = "io.micronaut:micronaut-runtime", version.ref = "micronaut-core" } micronaut-security = { module = "io.micronaut.security:micronaut-security", version.ref = 
"micronaut-security" } micronaut-security-jwt = { module = "io.micronaut.security:micronaut-security-jwt", version.ref = "micronaut-security" } micronaut-security-oauth2 = { module = "io.micronaut.security:micronaut-security-oauth2", version.ref = "micronaut-security" } micronaut-test-core = { module = "io.micronaut.test:micronaut-test-core", version.ref = "micronaut-test" } micronaut-test-junit5 = { module = "io.micronaut.test:micronaut-test-junit5", version.ref = "micronaut-test" } -micronaut-validation = { module = "io.micronaut.validation:micronaut-validation", version = "4.7.0" } +micronaut-validation = { module = "io.micronaut.validation:micronaut-validation", version = "4.8.0" } [bundles] apache = ["apache-commons", "apache-commons-lang"] diff --git a/docker/Makefile b/docker/Makefile index 238049af0ac..12d9c15290b 100644 --- a/docker/Makefile +++ b/docker/Makefile @@ -37,9 +37,8 @@ image.airbyte-base-java-python-image: buildx.start --push \ -f $(IMAGES_DIR)/airbyte-base-java-python-image/Dockerfile . ; \ else \ - docker build \ + docker build -t airbyte/airbyte-base-java-python-image:$(VERSION) \ --build-arg JDK_IMAGE=$(BASE_JAVA_IMAGE) \ - -t airbyte/airbyte-base-java-python-image:$(VERSION) \ -f $(IMAGES_DIR)/airbyte-base-java-python-image/Dockerfile . ; \ fi @@ -52,9 +51,8 @@ image.airbyte-base-java-worker-image: buildx.start --push \ -f $(IMAGES_DIR)/airbyte-base-java-worker-image/Dockerfile . ; \ else \ - docker build \ + docker build -t airbyte/airbyte-base-java-worker-image:$(VERSION) \ --build-arg JDK_IMAGE=$(BASE_JAVA_IMAGE) \ - -t airbyte/airbyte-base-java-worker-image:$(VERSION) \ -f $(IMAGES_DIR)/airbyte-base-java-worker-image/Dockerfile . ; \ fi @@ -67,9 +65,8 @@ image.airbyte-busybox: buildx.start --push \ -f $(IMAGES_DIR)/airbyte-busybox/Dockerfile . ; \ else \ - docker build \ + docker build -t airbyte/busybox:$(VERSION) \ --build-arg BASE_BUSYBOX_IMAGE_VERSION=$(BASE_BUSYBOX_IMAGE_VERSION) \ - -t airbyte/busybox:$(VERSION) \ -f $(IMAGES_DIR)/airbyte-busybox/Dockerfile . ; \ fi @@ -82,8 +79,41 @@ image.airbyte-mc: buildx.start --push \ -f $(IMAGES_DIR)/airbyte-mc/Dockerfile . ; \ else \ - docker build \ + docker build -t airbyte/mc:$(VERSION) \ --build-arg ALPINE_IMAGE_VERSION=$(ALPINE_IMAGE_VERSION) \ - -t airbyte/mc:$(VERSION) \ -f $(IMAGES_DIR)/airbyte-mc/Dockerfile . ; \ fi + +image.airbyte-nginx-slim: ## Build the airbyte/nginx-unprivileged image +image.airbyte-nginx-slim: buildx.start + @if [ "$(PUBLISH)" = "true" ]; then \ + docker buildx build -t airbyte/nginx-alpine-slim:$(VERSION) \ + --build-arg UID="1000" \ + --build-arg GID="1000" \ + --platform linux/amd64,linux/arm64 \ + --push \ + -f $(IMAGES_DIR)/airbyte-nginx/alpine-slim/Dockerfile $(IMAGES_DIR)/airbyte-nginx/alpine-slim/. ; \ + else \ + docker build -t airbyte/nginx-alpine-slim:$(VERSION) \ + --build-arg UID="1000" \ + --build-arg GID="1000" \ + -f $(IMAGES_DIR)/airbyte-nginx/alpine-slim/Dockerfile $(IMAGES_DIR)/airbyte-nginx/alpine-slim/. ; \ + fi + +image.airbyte-nginx-unprivileged: ## Build the airbyte/nginx-unprivileged image +image.airbyte-nginx-unprivileged: buildx.start + @if [ "$(PUBLISH)" = "true" ]; then \ + docker buildx build -t airbyte/nginx-unprivileged:$(VERSION) \ + --build-arg IMAGE="airbyte/nginx-alpine-slim:1.27.2-alpine-slim-2" \ + --build-arg UID="1000" \ + --build-arg GID="1000" \ + --platform linux/amd64,linux/arm64 \ + --push \ + -f $(IMAGES_DIR)/airbyte-nginx/unprivileged/Dockerfile $(IMAGES_DIR)/airbyte-nginx/unprivileged/. 
; \ + else \ + docker build -t airbyte/nginx-unprivileged:$(VERSION) \ + --build-arg IMAGE="airbyte/nginx-alpine-slim:1.27.2-alpine-slim-2" \ + --build-arg UID="1000" \ + --build-arg GID="1000" \ + -f $(IMAGES_DIR)/airbyte-nginx/unprivileged/Dockerfile $(IMAGES_DIR)/airbyte-nginx/unprivileged/. ; \ + fi diff --git a/docker/airbyte-nginx/alpine-slim/10-listen-on-ipv6-by-default.sh b/docker/airbyte-nginx/alpine-slim/10-listen-on-ipv6-by-default.sh new file mode 100755 index 00000000000..29898b1c222 --- /dev/null +++ b/docker/airbyte-nginx/alpine-slim/10-listen-on-ipv6-by-default.sh @@ -0,0 +1,67 @@ +#!/bin/sh +# vim:sw=4:ts=4:et + +set -e + +entrypoint_log() { + if [ -z "${NGINX_ENTRYPOINT_QUIET_LOGS:-}" ]; then + echo "$@" + fi +} + +ME=$(basename "$0") +DEFAULT_CONF_FILE="etc/nginx/conf.d/default.conf" + +# check if we have ipv6 available +if [ ! -f "/proc/net/if_inet6" ]; then + entrypoint_log "$ME: info: ipv6 not available" + exit 0 +fi + +if [ ! -f "/$DEFAULT_CONF_FILE" ]; then + entrypoint_log "$ME: info: /$DEFAULT_CONF_FILE is not a file or does not exist" + exit 0 +fi + +# check if the file can be modified, e.g. not on a r/o filesystem +touch /$DEFAULT_CONF_FILE 2>/dev/null || { entrypoint_log "$ME: info: can not modify /$DEFAULT_CONF_FILE (read-only file system?)"; exit 0; } + +# check if the file is already modified, e.g. on a container restart +grep -q "listen \[::]\:8080;" /$DEFAULT_CONF_FILE && { entrypoint_log "$ME: info: IPv6 listen already enabled"; exit 0; } + +if [ -f "/etc/os-release" ]; then + . /etc/os-release +else + entrypoint_log "$ME: info: can not guess the operating system" + exit 0 +fi + +entrypoint_log "$ME: info: Getting the checksum of /$DEFAULT_CONF_FILE" + +case "$ID" in + "debian") + CHECKSUM=$(dpkg-query --show --showformat='${Conffiles}\n' nginx | grep $DEFAULT_CONF_FILE | cut -d' ' -f 3) + echo "$CHECKSUM /$DEFAULT_CONF_FILE" | md5sum -c - >/dev/null 2>&1 || { + entrypoint_log "$ME: info: /$DEFAULT_CONF_FILE differs from the packaged version" + exit 0 + } + ;; + "alpine") + CHECKSUM=$(apk manifest nginx 2>/dev/null| grep $DEFAULT_CONF_FILE | cut -d' ' -f 1 | cut -d ':' -f 2) + echo "$CHECKSUM /$DEFAULT_CONF_FILE" | sha1sum -c - >/dev/null 2>&1 || { + entrypoint_log "$ME: info: /$DEFAULT_CONF_FILE differs from the packaged version" + exit 0 + } + ;; + *) + entrypoint_log "$ME: info: Unsupported distribution" + exit 0 + ;; +esac + +# enable ipv6 on default.conf listen sockets +sed -i -E 's,listen 8080;,listen 8080;\n listen [::]:8080;,' /$DEFAULT_CONF_FILE + +entrypoint_log "$ME: info: Enabled listen on IPv6 in /$DEFAULT_CONF_FILE" + +exit 0 diff --git a/docker/airbyte-nginx/alpine-slim/15-local-resolvers.envsh b/docker/airbyte-nginx/alpine-slim/15-local-resolvers.envsh new file mode 100755 index 00000000000..e830ddacda8 --- /dev/null +++ b/docker/airbyte-nginx/alpine-slim/15-local-resolvers.envsh @@ -0,0 +1,15 @@ +#!/bin/sh +# vim:sw=2:ts=2:sts=2:et + +set -eu + +LC_ALL=C +PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + +[ "${NGINX_ENTRYPOINT_LOCAL_RESOLVERS:-}" ] || return 0 + +NGINX_LOCAL_RESOLVERS=$(awk 'BEGIN{ORS=" "} $1=="nameserver" {if ($2 ~ ":") {print "["$2"]"} else {print $2}}' /etc/resolv.conf) + +NGINX_LOCAL_RESOLVERS="${NGINX_LOCAL_RESOLVERS% }" + +export NGINX_LOCAL_RESOLVERS diff --git a/docker/airbyte-nginx/alpine-slim/20-envsubst-on-templates.sh b/docker/airbyte-nginx/alpine-slim/20-envsubst-on-templates.sh new file mode 100755 index 00000000000..6938405bea2 --- /dev/null +++ 
b/docker/airbyte-nginx/alpine-slim/20-envsubst-on-templates.sh @@ -0,0 +1,78 @@ +#!/bin/sh + +set -e + +ME=$(basename "$0") + +entrypoint_log() { + if [ -z "${NGINX_ENTRYPOINT_QUIET_LOGS:-}" ]; then + echo "$@" + fi +} + +add_stream_block() { + local conffile="/etc/nginx/nginx.conf" + + if grep -q -E "\s*stream\s*\{" "$conffile"; then + entrypoint_log "$ME: $conffile contains a stream block; include $stream_output_dir/*.conf to enable stream templates" + else + # check if the file can be modified, e.g. not on a r/o filesystem + touch "$conffile" 2>/dev/null || { entrypoint_log "$ME: info: can not modify $conffile (read-only file system?)"; exit 0; } + entrypoint_log "$ME: Appending stream block to $conffile to include $stream_output_dir/*.conf" + cat << END >> "$conffile" +# added by "$ME" on "$(date)" +stream { + include $stream_output_dir/*.conf; +} +END + fi +} + +auto_envsubst() { + local template_dir="${NGINX_ENVSUBST_TEMPLATE_DIR:-/etc/nginx/templates}" + local suffix="${NGINX_ENVSUBST_TEMPLATE_SUFFIX:-.template}" + local output_dir="${NGINX_ENVSUBST_OUTPUT_DIR:-/etc/nginx/conf.d}" + local stream_suffix="${NGINX_ENVSUBST_STREAM_TEMPLATE_SUFFIX:-.stream-template}" + local stream_output_dir="${NGINX_ENVSUBST_STREAM_OUTPUT_DIR:-/etc/nginx/stream-conf.d}" + local filter="${NGINX_ENVSUBST_FILTER:-}" + + local template defined_envs relative_path output_path subdir + defined_envs=$(printf '${%s} ' $(awk "END { for (name in ENVIRON) { print ( name ~ /${filter}/ ) ? name : \"\" } }" < /dev/null )) + [ -d "$template_dir" ] || return 0 + if [ ! -w "$output_dir" ]; then + entrypoint_log "$ME: ERROR: $template_dir exists, but $output_dir is not writable" + return 0 + fi + find "$template_dir" -follow -type f -name "*$suffix" -print | while read -r template; do + relative_path="${template#"$template_dir/"}" + output_path="$output_dir/${relative_path%"$suffix"}" + subdir=$(dirname "$relative_path") + # create a subdirectory where the template file exists + mkdir -p "$output_dir/$subdir" + entrypoint_log "$ME: Running envsubst on $template to $output_path" + envsubst "$defined_envs" < "$template" > "$output_path" + done + + # Print the first file with the stream suffix, this will be false if there are none + if test -n "$(find "$template_dir" -name "*$stream_suffix" -print -quit)"; then + mkdir -p "$stream_output_dir" + if [ ! 
-w "$stream_output_dir" ]; then + entrypoint_log "$ME: ERROR: $template_dir exists, but $stream_output_dir is not writable" + return 0 + fi + add_stream_block + find "$template_dir" -follow -type f -name "*$stream_suffix" -print | while read -r template; do + relative_path="${template#"$template_dir/"}" + output_path="$stream_output_dir/${relative_path%"$stream_suffix"}" + subdir=$(dirname "$relative_path") + # create a subdirectory where the template file exists + mkdir -p "$stream_output_dir/$subdir" + entrypoint_log "$ME: Running envsubst on $template to $output_path" + envsubst "$defined_envs" < "$template" > "$output_path" + done + fi +} + +auto_envsubst + +exit 0 diff --git a/docker/airbyte-nginx/alpine-slim/30-tune-worker-processes.sh b/docker/airbyte-nginx/alpine-slim/30-tune-worker-processes.sh new file mode 100755 index 00000000000..defb994f3d0 --- /dev/null +++ b/docker/airbyte-nginx/alpine-slim/30-tune-worker-processes.sh @@ -0,0 +1,188 @@ +#!/bin/sh +# vim:sw=2:ts=2:sts=2:et + +set -eu + +LC_ALL=C +ME=$(basename "$0") +PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin + +[ "${NGINX_ENTRYPOINT_WORKER_PROCESSES_AUTOTUNE:-}" ] || exit 0 + +touch /etc/nginx/nginx.conf 2>/dev/null || { echo >&2 "$ME: error: can not modify /etc/nginx/nginx.conf (read-only file system?)"; exit 0; } + +ceildiv() { + num=$1 + div=$2 + echo $(( (num + div - 1) / div )) +} + +get_cpuset() { + cpusetroot=$1 + cpusetfile=$2 + ncpu=0 + [ -f "$cpusetroot/$cpusetfile" ] || return 1 + for token in $( tr ',' ' ' < "$cpusetroot/$cpusetfile" ); do + case "$token" in + *-*) + count=$( seq $(echo "$token" | tr '-' ' ') | wc -l ) + ncpu=$(( ncpu+count )) + ;; + *) + ncpu=$(( ncpu+1 )) + ;; + esac + done + echo "$ncpu" +} + +get_quota() { + cpuroot=$1 + ncpu=0 + [ -f "$cpuroot/cpu.cfs_quota_us" ] || return 1 + [ -f "$cpuroot/cpu.cfs_period_us" ] || return 1 + cfs_quota=$( cat "$cpuroot/cpu.cfs_quota_us" ) + cfs_period=$( cat "$cpuroot/cpu.cfs_period_us" ) + [ "$cfs_quota" = "-1" ] && return 1 + [ "$cfs_period" = "0" ] && return 1 + ncpu=$( ceildiv "$cfs_quota" "$cfs_period" ) + [ "$ncpu" -gt 0 ] || return 1 + echo "$ncpu" +} + +get_quota_v2() { + cpuroot=$1 + ncpu=0 + [ -f "$cpuroot/cpu.max" ] || return 1 + cfs_quota=$( cut -d' ' -f 1 < "$cpuroot/cpu.max" ) + cfs_period=$( cut -d' ' -f 2 < "$cpuroot/cpu.max" ) + [ "$cfs_quota" = "max" ] && return 1 + [ "$cfs_period" = "0" ] && return 1 + ncpu=$( ceildiv "$cfs_quota" "$cfs_period" ) + [ "$ncpu" -gt 0 ] || return 1 + echo "$ncpu" +} + +get_cgroup_v1_path() { + needle=$1 + found= + foundroot= + mountpoint= + + [ -r "/proc/self/mountinfo" ] || return 1 + [ -r "/proc/self/cgroup" ] || return 1 + + while IFS= read -r line; do + case "$needle" in + "cpuset") + case "$line" in + *cpuset*) + found=$( echo "$line" | cut -d ' ' -f 4,5 ) + break + ;; + esac + ;; + "cpu") + case "$line" in + *cpuset*) + ;; + *cpu,cpuacct*|*cpuacct,cpu|*cpuacct*|*cpu*) + found=$( echo "$line" | cut -d ' ' -f 4,5 ) + break + ;; + esac + esac + done << __EOF__ +$( grep -F -- '- cgroup ' /proc/self/mountinfo ) +__EOF__ + + while IFS= read -r line; do + controller=$( echo "$line" | cut -d: -f 2 ) + case "$needle" in + "cpuset") + case "$controller" in + cpuset) + mountpoint=$( echo "$line" | cut -d: -f 3 ) + break + ;; + esac + ;; + "cpu") + case "$controller" in + cpu,cpuacct|cpuacct,cpu|cpuacct|cpu) + mountpoint=$( echo "$line" | cut -d: -f 3 ) + break + ;; + esac + ;; + esac +done << __EOF__ +$( grep -F -- 'cpu' /proc/self/cgroup ) +__EOF__ + + case "${found%% *}" in + "/") + 
foundroot="${found##* }$mountpoint" + ;; + "$mountpoint") + foundroot="${found##* }" + ;; + esac + echo "$foundroot" +} + +get_cgroup_v2_path() { + found= + foundroot= + mountpoint= + + [ -r "/proc/self/mountinfo" ] || return 1 + [ -r "/proc/self/cgroup" ] || return 1 + + while IFS= read -r line; do + found=$( echo "$line" | cut -d ' ' -f 4,5 ) + done << __EOF__ +$( grep -F -- '- cgroup2 ' /proc/self/mountinfo ) +__EOF__ + + while IFS= read -r line; do + mountpoint=$( echo "$line" | cut -d: -f 3 ) +done << __EOF__ +$( grep -F -- '0::' /proc/self/cgroup ) +__EOF__ + + case "${found%% *}" in + "") + return 1 + ;; + "/") + foundroot="${found##* }$mountpoint" + ;; + "$mountpoint" | /../*) + foundroot="${found##* }" + ;; + esac + echo "$foundroot" +} + +ncpu_online=$( getconf _NPROCESSORS_ONLN ) +ncpu_cpuset= +ncpu_quota= +ncpu_cpuset_v2= +ncpu_quota_v2= + +cpuset=$( get_cgroup_v1_path "cpuset" ) && ncpu_cpuset=$( get_cpuset "$cpuset" "cpuset.effective_cpus" ) || ncpu_cpuset=$ncpu_online +cpu=$( get_cgroup_v1_path "cpu" ) && ncpu_quota=$( get_quota "$cpu" ) || ncpu_quota=$ncpu_online +cgroup_v2=$( get_cgroup_v2_path ) && ncpu_cpuset_v2=$( get_cpuset "$cgroup_v2" "cpuset.cpus.effective" ) || ncpu_cpuset_v2=$ncpu_online +cgroup_v2=$( get_cgroup_v2_path ) && ncpu_quota_v2=$( get_quota_v2 "$cgroup_v2" ) || ncpu_quota_v2=$ncpu_online + +ncpu=$( printf "%s\n%s\n%s\n%s\n%s\n" \ + "$ncpu_online" \ + "$ncpu_cpuset" \ + "$ncpu_quota" \ + "$ncpu_cpuset_v2" \ + "$ncpu_quota_v2" \ + | sort -n \ + | head -n 1 ) + +sed -i.bak -r 's/^(worker_processes)(.*)$/# Commented out by '"$ME"' on '"$(date)"'\n#\1\2\n\1 '"$ncpu"';/' /etc/nginx/nginx.conf diff --git a/docker/airbyte-nginx/alpine-slim/Dockerfile b/docker/airbyte-nginx/alpine-slim/Dockerfile new file mode 100644 index 00000000000..c1b0b0c871d --- /dev/null +++ b/docker/airbyte-nginx/alpine-slim/Dockerfile @@ -0,0 +1,140 @@ +# +# NOTE: THIS DOCKERFILE IS GENERATED VIA "update.sh" +# +# PLEASE DO NOT EDIT IT DIRECTLY. 
+# +ARG IMAGE=alpine:3.20 +FROM $IMAGE + +LABEL maintainer="NGINX Docker Maintainers " + +ENV NGINX_VERSION=1.27.2 +ENV PKG_RELEASE=1 +ENV DYNPKG_RELEASE=1 + +ARG UID=101 +ARG GID=101 + +RUN set -x \ +# create nginx user/group first, to be consistent throughout docker variants + && addgroup -g $GID -S nginx || true \ + && adduser -S -D -H -u $UID -h /var/cache/nginx -s /sbin/nologin -G nginx -g nginx nginx || true \ + && apkArch="$(cat /etc/apk/arch)" \ + && nginxPackages=" \ + nginx=${NGINX_VERSION}-r${PKG_RELEASE} \ + " \ +# install prerequisites for public key and pkg-oss checks + && apk add --no-cache --virtual .checksum-deps \ + openssl \ + && case "$apkArch" in \ + x86_64|aarch64) \ +# arches officially built by upstream + set -x \ + && KEY_SHA512="e09fa32f0a0eab2b879ccbbc4d0e4fb9751486eedda75e35fac65802cc9faa266425edf83e261137a2f4d16281ce2c1a5f4502930fe75154723da014214f0655" \ + && wget -O /tmp/nginx_signing.rsa.pub https://nginx.org/keys/nginx_signing.rsa.pub \ + && if echo "$KEY_SHA512 */tmp/nginx_signing.rsa.pub" | sha512sum -c -; then \ + echo "key verification succeeded!"; \ + mv /tmp/nginx_signing.rsa.pub /etc/apk/keys/; \ + else \ + echo "key verification failed!"; \ + exit 1; \ + fi \ + && apk add -X "https://nginx.org/packages/mainline/alpine/v$(egrep -o '^[0-9]+\.[0-9]+' /etc/alpine-release)/main" --no-cache $nginxPackages \ + ;; \ + *) \ +# we're on an architecture upstream doesn't officially build for +# let's build binaries from the published packaging sources + set -x \ + && tempDir="$(mktemp -d)" \ + && chown nobody:nobody $tempDir \ + && apk add --no-cache --virtual .build-deps \ + gcc \ + libc-dev \ + make \ + openssl-dev \ + pcre2-dev \ + zlib-dev \ + linux-headers \ + bash \ + alpine-sdk \ + findutils \ + curl \ + && su nobody -s /bin/sh -c " \ + export HOME=${tempDir} \ + && cd ${tempDir} \ + && curl -f -L -O https://github.com/nginx/pkg-oss/archive/${NGINX_VERSION}-${PKG_RELEASE}.tar.gz \ + && PKGOSSCHECKSUM=\"6982e2df739645fc72db5bdf994032f799718230e7016e811d9d482e5cf41814c888660ca9a68814d5e99ab571e892ada3bd43166e720cbf04c7f85b6934772c *${NGINX_VERSION}-${PKG_RELEASE}.tar.gz\" \ + && if [ \"\$(openssl sha512 -r ${NGINX_VERSION}-${PKG_RELEASE}.tar.gz)\" = \"\$PKGOSSCHECKSUM\" ]; then \ + echo \"pkg-oss tarball checksum verification succeeded!\"; \ + else \ + echo \"pkg-oss tarball checksum verification failed!\"; \ + exit 1; \ + fi \ + && tar xzvf ${NGINX_VERSION}-${PKG_RELEASE}.tar.gz \ + && cd pkg-oss-${NGINX_VERSION}-${PKG_RELEASE} \ + && cd alpine \ + && make base \ + && apk index --allow-untrusted -o ${tempDir}/packages/alpine/${apkArch}/APKINDEX.tar.gz ${tempDir}/packages/alpine/${apkArch}/*.apk \ + && abuild-sign -k ${tempDir}/.abuild/abuild-key.rsa ${tempDir}/packages/alpine/${apkArch}/APKINDEX.tar.gz \ + " \ + && cp ${tempDir}/.abuild/abuild-key.rsa.pub /etc/apk/keys/ \ + && apk del --no-network .build-deps \ + && apk add -X ${tempDir}/packages/alpine/ --no-cache $nginxPackages \ + ;; \ + esac \ +# remove checksum deps + && apk del --no-network .checksum-deps \ +# if we have leftovers from building, let's purge them (including extra, unnecessary build deps) + && if [ -n "$tempDir" ]; then rm -rf "$tempDir"; fi \ + && if [ -f "/etc/apk/keys/abuild-key.rsa.pub" ]; then rm -f /etc/apk/keys/abuild-key.rsa.pub; fi \ +# Bring in gettext so we can get `envsubst`, then throw +# the rest away. To do this, we need to install `gettext` +# then move `envsubst` out of the way so `gettext` can +# be deleted completely, then move `envsubst` back. 
+ && apk add --no-cache --virtual .gettext gettext \ + && mv /usr/bin/envsubst /tmp/ \ + \ + && runDeps="$( \ + scanelf --needed --nobanner /tmp/envsubst \ + | awk '{ gsub(/,/, "\nso:", $2); print "so:" $2 }' \ + | sort -u \ + | xargs -r apk info --installed \ + | sort -u \ + )" \ + && apk add --no-cache $runDeps \ + && apk del --no-network .gettext \ + && mv /tmp/envsubst /usr/local/bin/ \ +# Bring in tzdata so users could set the timezones through the environment +# variables + && apk add --no-cache tzdata \ +# forward request and error logs to docker log collector + && ln -sf /dev/stdout /var/log/nginx/access.log \ + && ln -sf /dev/stderr /var/log/nginx/error.log \ +# create a docker-entrypoint.d directory + && mkdir /docker-entrypoint.d + +# implement changes required to run NGINX as an unprivileged user +RUN sed -i 's,listen 80;,listen 8080;,' /etc/nginx/conf.d/default.conf \ + && sed -i '/user nginx;/d' /etc/nginx/nginx.conf \ + && sed -i 's,/var/run/nginx.pid,/tmp/nginx.pid,' /etc/nginx/nginx.conf \ + && sed -i "/^http {/a \ proxy_temp_path /tmp/proxy_temp;\n client_body_temp_path /tmp/client_temp;\n fastcgi_temp_path /tmp/fastcgi_temp;\n uwsgi_temp_path /tmp/uwsgi_temp;\n scgi_temp_path /tmp/scgi_temp;\n" /etc/nginx/nginx.conf \ +# nginx user must own the cache and etc directory to write cache and tweak the nginx config + && chown -R $UID:0 /var/cache/nginx \ + && chmod -R g+w /var/cache/nginx \ + && chown -R $UID:0 /etc/nginx \ + && chmod -R g+w /etc/nginx + +COPY docker-entrypoint.sh / +COPY 10-listen-on-ipv6-by-default.sh /docker-entrypoint.d +COPY 15-local-resolvers.envsh /docker-entrypoint.d +COPY 20-envsubst-on-templates.sh /docker-entrypoint.d +COPY 30-tune-worker-processes.sh /docker-entrypoint.d +ENTRYPOINT ["/docker-entrypoint.sh"] + +EXPOSE 8080 + +STOPSIGNAL SIGQUIT + +USER $UID + +CMD ["nginx", "-g", "daemon off;"] diff --git a/docker/airbyte-nginx/alpine-slim/README.md b/docker/airbyte-nginx/alpine-slim/README.md new file mode 100644 index 00000000000..ba68f981e95 --- /dev/null +++ b/docker/airbyte-nginx/alpine-slim/README.md @@ -0,0 +1,12 @@ +# alpine-slim + +> [!NOTE] +> If you publish a new version of this image, you will need to update the `--build-arg IMAGE=` argument passed to the +> `image.airbyte-nginx-unprivileged` task in the [Makefile](../../Makefile) + +This [Dockerfile](Dockerfile) is a copy of the [unprivileged nginx alpine-slim Dockerfile](https://github.com/nginxinc/docker-nginx-unprivileged/blob/main/mainline/alpine-slim/Dockerfile). + +We want all of Airbyte to run as the same uid/gid which requires us to build our own nginx image. The nginx provided image uses uid/gid 101/101, and +we want them to be 1000/1000, which requires the `build-arg` values of `UID` and `GID` to be defined. + +See [the docker Makefile](../../Makefile) for where these two `build-arg` values are defined.
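As a quick local sanity check of the image added above (not part of this change), the non-publish branch of the `image.airbyte-nginx-slim` target reduces to a plain `docker build`; the sketch below assumes it is run from the repository root and uses an arbitrary `dev` tag:

# Build the slim base image with the overridden UID/GID, mirroring the Makefile target.
docker build -t airbyte/nginx-alpine-slim:dev \
  --build-arg UID="1000" \
  --build-arg GID="1000" \
  -f docker/airbyte-nginx/alpine-slim/Dockerfile docker/airbyte-nginx/alpine-slim/.

# The container should report uid/gid 1000 rather than upstream's 101/101.
docker run --rm --entrypoint id airbyte/nginx-alpine-slim:dev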
diff --git a/docker/airbyte-nginx/alpine-slim/docker-entrypoint.sh b/docker/airbyte-nginx/alpine-slim/docker-entrypoint.sh new file mode 100755 index 00000000000..8ea04f217da --- /dev/null +++ b/docker/airbyte-nginx/alpine-slim/docker-entrypoint.sh @@ -0,0 +1,47 @@ +#!/bin/sh +# vim:sw=4:ts=4:et + +set -e + +entrypoint_log() { + if [ -z "${NGINX_ENTRYPOINT_QUIET_LOGS:-}" ]; then + echo "$@" + fi +} + +if [ "$1" = "nginx" ] || [ "$1" = "nginx-debug" ]; then + if /usr/bin/find "/docker-entrypoint.d/" -mindepth 1 -maxdepth 1 -type f -print -quit 2>/dev/null | read v; then + entrypoint_log "$0: /docker-entrypoint.d/ is not empty, will attempt to perform configuration" + + entrypoint_log "$0: Looking for shell scripts in /docker-entrypoint.d/" + find "/docker-entrypoint.d/" -follow -type f -print | sort -V | while read -r f; do + case "$f" in + *.envsh) + if [ -x "$f" ]; then + entrypoint_log "$0: Sourcing $f"; + . "$f" + else + # warn on shell scripts without exec bit + entrypoint_log "$0: Ignoring $f, not executable"; + fi + ;; + *.sh) + if [ -x "$f" ]; then + entrypoint_log "$0: Launching $f"; + "$f" + else + # warn on shell scripts without exec bit + entrypoint_log "$0: Ignoring $f, not executable"; + fi + ;; + *) entrypoint_log "$0: Ignoring $f";; + esac + done + + entrypoint_log "$0: Configuration complete; ready for start up" + else + entrypoint_log "$0: No files found in /docker-entrypoint.d/, skipping configuration" + fi +fi + +exec "$@" diff --git a/docker/airbyte-nginx/unprivileged/Dockerfile b/docker/airbyte-nginx/unprivileged/Dockerfile new file mode 100644 index 00000000000..2c6dab65a79 --- /dev/null +++ b/docker/airbyte-nginx/unprivileged/Dockerfile @@ -0,0 +1,87 @@ +# +# NOTE: THIS DOCKERFILE IS GENERATED VIA "update.sh" +# +# PLEASE DO NOT EDIT IT DIRECTLY. 
+# +ARG IMAGE=nginxinc/nginx-unprivileged:1.27.2-alpine-slim +FROM $IMAGE + +ENV NJS_VERSION=0.8.6 +ENV NJS_RELEASE=1 + +ARG UID=101 +ARG GID=101 + +USER root + +RUN set -x \ + && apkArch="$(cat /etc/apk/arch)" \ + && nginxPackages=" \ + nginx=${NGINX_VERSION}-r${PKG_RELEASE} \ + nginx-module-xslt=${NGINX_VERSION}-r${DYNPKG_RELEASE} \ + nginx-module-geoip=${NGINX_VERSION}-r${DYNPKG_RELEASE} \ + nginx-module-image-filter=${NGINX_VERSION}-r${DYNPKG_RELEASE} \ + nginx-module-njs=${NGINX_VERSION}.${NJS_VERSION}-r${NJS_RELEASE} \ + " \ +# install prerequisites for public key and pkg-oss checks + && apk add --no-cache --virtual .checksum-deps \ + openssl \ + && case "$apkArch" in \ + x86_64|aarch64) \ +# arches officially built by upstream + apk add -X "https://nginx.org/packages/mainline/alpine/v$(egrep -o '^[0-9]+\.[0-9]+' /etc/alpine-release)/main" --no-cache $nginxPackages \ + ;; \ + *) \ +# we're on an architecture upstream doesn't officially build for +# let's build binaries from the published packaging sources + set -x \ + && tempDir="$(mktemp -d)" \ + && chown nobody:nobody $tempDir \ + && apk add --no-cache --virtual .build-deps \ + gcc \ + libc-dev \ + make \ + openssl-dev \ + pcre2-dev \ + zlib-dev \ + linux-headers \ + libxslt-dev \ + gd-dev \ + geoip-dev \ + libedit-dev \ + bash \ + alpine-sdk \ + findutils \ + curl \ + && su nobody -s /bin/sh -c " \ + export HOME=${tempDir} \ + && cd ${tempDir} \ + && curl -f -L -O https://github.com/nginx/pkg-oss/archive/${NGINX_VERSION}-${PKG_RELEASE}.tar.gz \ + && PKGOSSCHECKSUM=\"6982e2df739645fc72db5bdf994032f799718230e7016e811d9d482e5cf41814c888660ca9a68814d5e99ab571e892ada3bd43166e720cbf04c7f85b6934772c *${NGINX_VERSION}-${PKG_RELEASE}.tar.gz\" \ + && if [ \"\$(openssl sha512 -r ${NGINX_VERSION}-${PKG_RELEASE}.tar.gz)\" = \"\$PKGOSSCHECKSUM\" ]; then \ + echo \"pkg-oss tarball checksum verification succeeded!\"; \ + else \ + echo \"pkg-oss tarball checksum verification failed!\"; \ + exit 1; \ + fi \ + && tar xzvf ${NGINX_VERSION}-${PKG_RELEASE}.tar.gz \ + && cd pkg-oss-${NGINX_VERSION}-${PKG_RELEASE} \ + && cd alpine \ + && make module-geoip module-image-filter module-njs module-xslt \ + && apk index --allow-untrusted -o ${tempDir}/packages/alpine/${apkArch}/APKINDEX.tar.gz ${tempDir}/packages/alpine/${apkArch}/*.apk \ + && abuild-sign -k ${tempDir}/.abuild/abuild-key.rsa ${tempDir}/packages/alpine/${apkArch}/APKINDEX.tar.gz \ + " \ + && cp ${tempDir}/.abuild/abuild-key.rsa.pub /etc/apk/keys/ \ + && apk del --no-network .build-deps \ + && apk add -X ${tempDir}/packages/alpine/ --no-cache $nginxPackages \ + ;; \ + esac \ +# remove checksum deps + && apk del --no-network .checksum-deps \ +# if we have leftovers from building, let's purge them (including extra, unnecessary build deps) + && if [ -n "$tempDir" ]; then rm -rf "$tempDir"; fi \ + && if [ -f "/etc/apk/keys/abuild-key.rsa.pub" ]; then rm -f /etc/apk/keys/abuild-key.rsa.pub; fi \ +# Bring in curl and ca-certificates to make registering on DNS SD easier + && apk add --no-cache curl ca-certificates + +USER $UID diff --git a/docker/airbyte-nginx/unprivileged/README.md b/docker/airbyte-nginx/unprivileged/README.md new file mode 100644 index 00000000000..faa3b00237b --- /dev/null +++ b/docker/airbyte-nginx/unprivileged/README.md @@ -0,0 +1,8 @@ +# airbyte-nginx + +This [Dockerfile](Dockerfile) is a copy of the [unprivileged nginx Dockerfile](https://raw.githubusercontent.com/nginxinc/docker-nginx-unprivileged/refs/heads/main/mainline/alpine/Dockerfile). 
+ +We want all of Airbyte to run as the same uid/gid which requires us to build our own nginx image. The nginx provided image uses uid/gid 101/101, and +we want them to be 1000/1000, which requires the `build-arg` values of `UID` and `GID` to be defined. + +See [the docker Makefile](../../Makefile) for where these two `build-arg` values are defined. diff --git a/flags.yml index 345eed7b923..b3948afb63c 100644 --- a/flags.yml +++ b/flags.yml @@ -55,3 +55,5 @@ flags: serve: true - name: platform.fail-sync-on-invalid-checksum serve: false + - name: platform.allow-sub-one-hour-sync-frequency + serve: true diff --git a/settings.gradle.kts index f809796096d..2f116a6c375 100644 --- a/settings.gradle.kts +++ b/settings.gradle.kts @@ -80,6 +80,7 @@ include(":oss:airbyte-api:connector-builder-api") include(":oss:airbyte-api:problems-api") include(":oss:airbyte-api:public-api") include(":oss:airbyte-api:workload-api") +include(":oss:airbyte-audit-logging") include(":oss:airbyte-workload-api-server") include(":oss:airbyte-commons-protocol") include(":oss:airbyte-config:specs") @@ -144,6 +145,7 @@ project(":oss:airbyte-api:connector-builder-api").projectDir = file("airbyte-api project(":oss:airbyte-api:problems-api").projectDir = file("airbyte-api/problems-api") project(":oss:airbyte-api:public-api").projectDir = file("airbyte-api/public-api") project(":oss:airbyte-api:workload-api").projectDir = file("airbyte-api/workload-api") +project(":oss:airbyte-audit-logging").projectDir = file("airbyte-audit-logging") project(":oss:airbyte-workload-api-server").projectDir = file("airbyte-workload-api-server") project(":oss:airbyte-commons-protocol").projectDir = file("airbyte-commons-protocol") project(":oss:airbyte-config:specs").projectDir = file("airbyte-config/specs")
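The `settings.gradle.kts` hunks above only register the new `:oss:airbyte-audit-logging` module; its build script is not part of this diff. Assuming the `airbyte-audit-logging` directory and its build file exist on disk, the registration can be exercised with the standard Gradle wrapper tasks:

# Confirm the module appears in the project tree.
./gradlew projects | grep airbyte-audit-logging

# Build only the new module.
./gradlew :oss:airbyte-audit-logging:build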