diff --git a/.github/workflows/connectors_version_increment_check.yml b/.github/workflows/connectors_version_increment_check.yml index 27086e018128..b27cb7dad9e9 100644 --- a/.github/workflows/connectors_version_increment_check.yml +++ b/.github/workflows/connectors_version_increment_check.yml @@ -22,6 +22,7 @@ jobs: connectors_ci: name: Connectors Version Increment Check runs-on: connector-test-large + if: github.event.pull_request.head.repo.fork != true timeout-minutes: 10 steps: - name: Checkout Airbyte diff --git a/.github/workflows/format-fix-command.yml b/.github/workflows/format-fix-command.yml index 240cbae7cb06..f0225cfb0e70 100644 --- a/.github/workflows/format-fix-command.yml +++ b/.github/workflows/format-fix-command.yml @@ -72,9 +72,6 @@ jobs: continue-on-error: true with: context: "manual" - dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_2 }} - docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }} - docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }} gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }} sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }} github_token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }} diff --git a/.github/workflows/publish-cdk-command-manually.yml b/.github/workflows/publish-cdk-command-manually.yml index 64719b6fb7c2..38661695f287 100644 --- a/.github/workflows/publish-cdk-command-manually.yml +++ b/.github/workflows/publish-cdk-command-manually.yml @@ -313,8 +313,8 @@ jobs: uses: peter-evans/create-pull-request@v6 with: token: ${{ secrets.GH_PAT_MAINTENANCE_OCTAVIA }} - commit-message: Updating CDK version following release - title: Updating CDK version following release + commit-message: "chore: update CDK version following release" + title: "chore: update CDK version following release" body: This is an automatically generated PR triggered by a CDK release branch: automatic-cdk-release base: master diff --git a/airbyte-cdk/python/CHANGELOG.md b/airbyte-cdk/python/CHANGELOG.md index 82ae866d5bf5..4ae8ccc3c4f8 100644 --- a/airbyte-cdk/python/CHANGELOG.md +++ b/airbyte-cdk/python/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## 1.2.1 +Python 3.11 compatibility bugfixes + +## 1.2.0 +add client side incremental sync + ## 1.1.3 Removed experimental suffix for unstructured file type diff --git a/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/config.py b/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/config.py index 0f42e151653a..792a9b401c2c 100644 --- a/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/config.py +++ b/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/config.py @@ -4,7 +4,7 @@ from typing import Any, Dict, List, Literal, Optional, Union -import dpath.util +import dpath from airbyte_cdk.utils.oneof_option_config import OneOfOptionConfig from airbyte_cdk.utils.spec_schema_transformations import resolve_refs from pydantic import BaseModel, Field @@ -264,7 +264,7 @@ class Config: @staticmethod def remove_discriminator(schema: Dict[str, Any]) -> None: """pydantic adds "discriminator" to the schema for oneOfs, which is not treated right by the platform as we inline all references""" - dpath.util.delete(schema, "properties/**/discriminator") + dpath.delete(schema, "properties/**/discriminator") @classmethod def schema(cls, by_alias: bool = True, ref_template: str = "") -> Dict[str, Any]: diff --git a/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/document_processor.py b/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/document_processor.py index 
e9a9c007e25e..45b6e4d7bc52 100644 --- a/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/document_processor.py +++ b/airbyte-cdk/python/airbyte_cdk/destinations/vector_db_based/document_processor.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from typing import Any, Dict, List, Mapping, Optional, Tuple -import dpath.util +import dpath from airbyte_cdk.destinations.vector_db_based.config import ProcessingConfigModel, SeparatorSplitterConfigModel, TextSplitterConfigModel from airbyte_cdk.destinations.vector_db_based.utils import create_stream_identifier from airbyte_cdk.models import AirbyteRecordMessage, ConfiguredAirbyteCatalog, ConfiguredAirbyteStream, DestinationSyncMode @@ -137,7 +137,7 @@ def _extract_relevant_fields(self, record: AirbyteRecordMessage, fields: Optiona relevant_fields = {} if fields and len(fields) > 0: for field in fields: - values = dpath.util.values(record.data, field, separator=".") + values = dpath.values(record.data, field, separator=".") if values and len(values) > 0: relevant_fields[field] = values if len(values) > 1 else values[0] else: @@ -162,7 +162,7 @@ def _extract_primary_key(self, record: AirbyteRecordMessage) -> Optional[str]: primary_key = [] for key in current_stream.primary_key: try: - primary_key.append(str(dpath.util.get(record.data, key))) + primary_key.append(str(dpath.get(record.data, key))) except KeyError: primary_key.append("__not_found__") stringified_primary_key = "_".join(primary_key) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/selective_authenticator.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/selective_authenticator.py index 6a9d6128706b..e3f39a0a8ec1 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/selective_authenticator.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/selective_authenticator.py @@ -27,7 +27,7 @@ def __new__( # type: ignore[misc] **kwargs: Any, ) -> DeclarativeAuthenticator: try: - selected_key = str(dpath.util.get(config, authenticator_selection_path)) + selected_key = str(dpath.get(config, authenticator_selection_path)) except KeyError as err: raise ValueError("The path from `authenticator_selection_path` is not found in the config.") from err diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/token_provider.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/token_provider.py index 8c5f31950d5d..6e19afd063d0 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/token_provider.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/auth/token_provider.py @@ -5,10 +5,10 @@ import datetime from abc import abstractmethod -from dataclasses import InitVar, dataclass +from dataclasses import InitVar, dataclass, field from typing import Any, List, Mapping, Optional, Union -import dpath.util +import dpath import pendulum from airbyte_cdk.sources.declarative.decoders.decoder import Decoder from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder @@ -36,7 +36,7 @@ class SessionTokenProvider(TokenProvider): parameters: InitVar[Mapping[str, Any]] message_repository: MessageRepository = NoopMessageRepository() - _decoder: Decoder = JsonDecoder(parameters={}) + _decoder: Decoder = field(default_factory=lambda: JsonDecoder(parameters={})) _next_expiration_time: Optional[DateTime] = None _token: Optional[str] = None @@ -62,7 +62,7 @@ def _refresh(self) -> None: ) if response is None: raise ReadException("Failed to get session token, response got ignored by requester") - 
session_token = dpath.util.get(self._decoder.decode(response), self.session_token_path) + session_token = dpath.get(self._decoder.decode(response), self.session_token_path) if self.expiration_duration is not None: self._next_expiration_time = pendulum.now() + self.expiration_duration self._token = session_token diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml index 678e70ae2a2f..ddab9445ee93 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/declarative_component_schema.yaml @@ -801,6 +801,10 @@ definitions: title: Whether the target API is formatted as a data feed description: A data feed API is an API that does not allow filtering and paginates the content from the most recent to the least recent. Given this, the CDK needs to know when to stop paginating and this field will generate a stop condition for pagination. type: boolean + is_client_side_incremental: + title: Whether the target API does not support filtering and returns all data (the cursor filters records in the client instead of the API side) + description: If the target API endpoint does not take cursor values to filter records and returns all records anyway, the connector with this cursor will filter out records locally, and only emit new records from the last sync, hence incremental. This means that all records would be read from the API, but only new records will be emitted to the destination. + type: boolean lookback_window: title: Lookback Window description: Time interval before the start_datetime to read data for, e.g. P1M for looking back one month. diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/dpath_extractor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/dpath_extractor.py index cfbe27136451..a13d7f2f5e6c 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/dpath_extractor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/dpath_extractor.py @@ -2,10 +2,10 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# -from dataclasses import InitVar, dataclass +from dataclasses import InitVar, dataclass, field from typing import Any, Iterable, List, Mapping, Union -import dpath.util +import dpath import requests from airbyte_cdk.sources.declarative.decoders.decoder import Decoder from airbyte_cdk.sources.declarative.decoders.json_decoder import JsonDecoder @@ -56,7 +56,7 @@ class DpathExtractor(RecordExtractor): field_path: List[Union[InterpolatedString, str]] config: Config parameters: InitVar[Mapping[str, Any]] - decoder: Decoder = JsonDecoder(parameters={}) + decoder: Decoder = field(default_factory=lambda: JsonDecoder(parameters={})) def __post_init__(self, parameters: Mapping[str, Any]) -> None: self._field_path = [InterpolatedString.create(path, parameters=parameters) for path in self.field_path] @@ -71,9 +71,9 @@ def extract_records(self, response: requests.Response) -> Iterable[Mapping[str, else: path = [path.eval(self.config) for path in self._field_path] if "*" in path: - extracted = dpath.util.values(response_body, path) + extracted = dpath.values(response_body, path) else: - extracted = dpath.util.get(response_body, path, default=[]) + extracted = dpath.get(response_body, path, default=[]) if isinstance(extracted, list): yield from extracted elif extracted: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py index 78e55408a07a..3af9ddaa7e96 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/extractors/record_filter.py @@ -1,10 +1,11 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # - +import datetime from dataclasses import InitVar, dataclass from typing import Any, Iterable, Mapping, Optional +from airbyte_cdk.sources.declarative.incremental import DatetimeBasedCursor, PerPartitionCursor from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean from airbyte_cdk.sources.types import Config, StreamSlice, StreamState @@ -36,3 +37,80 @@ def filter_records( for record in records: if self._filter_interpolator.eval(self.config, record=record, **kwargs): yield record + + +class ClientSideIncrementalRecordFilterDecorator(RecordFilter): + """ + Applies a filter to a list of records to exclude those that are older than the stream_state/start_date. 
+ + :param DatetimeBasedCursor date_time_based_cursor: Cursor used to extract datetime values + :param PerPartitionCursor per_partition_cursor: Optional Cursor used for mapping cursor value in nested stream_state + """ + + def __init__( + self, date_time_based_cursor: DatetimeBasedCursor, per_partition_cursor: Optional[PerPartitionCursor] = None, **kwargs: Any + ): + super().__init__(**kwargs) + self._date_time_based_cursor = date_time_based_cursor + self._per_partition_cursor = per_partition_cursor + + @property + def _cursor_field(self) -> str: + return self._date_time_based_cursor.cursor_field.eval(self._date_time_based_cursor.config) # type: ignore # eval returns a string in this context + + @property + def _start_date_from_config(self) -> datetime.datetime: + return self._date_time_based_cursor._start_datetime.get_datetime(self._date_time_based_cursor.config) + + @property + def _end_datetime(self) -> datetime.datetime: + return ( + self._date_time_based_cursor._end_datetime.get_datetime(self._date_time_based_cursor.config) + if self._date_time_based_cursor._end_datetime + else datetime.datetime.max + ) + + def filter_records( + self, + records: Iterable[Mapping[str, Any]], + stream_state: StreamState, + stream_slice: Optional[StreamSlice] = None, + next_page_token: Optional[Mapping[str, Any]] = None, + ) -> Iterable[Mapping[str, Any]]: + state_value = self._get_state_value(stream_state, stream_slice or StreamSlice(partition={}, cursor_slice={})) + filter_date: datetime.datetime = self._get_filter_date(state_value) + records = ( + record + for record in records + if self._end_datetime > self._date_time_based_cursor.parse_date(record[self._cursor_field]) > filter_date + ) + if self.condition: + records = super().filter_records( + records=records, stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token + ) + yield from records + + def _get_state_value(self, stream_state: StreamState, stream_slice: StreamSlice) -> Optional[str]: + """ + Return cursor_value or None in case it was not found. + Cursor_value may be empty if: + 1. It is an initial sync => no stream_state exist at all. + 2. In Parent-child stream, and we already make initial sync, so stream_state is present. + During the second read, we receive one extra record from parent and therefore no stream_state for this record will be found. + + :param StreamState stream_state: State + :param StreamSlice stream_slice: Current Stream slice + :return Optional[str]: cursor_value in case it was found, otherwise None. 
+ """ + if self._per_partition_cursor: + # self._per_partition_cursor is the same object that DeclarativeStream uses to save/update stream_state + partition_state = self._per_partition_cursor.select_state(stream_slice=stream_slice) + return partition_state.get(self._cursor_field) if partition_state else None + return stream_state.get(self._cursor_field) + + def _get_filter_date(self, state_value: Optional[str]) -> datetime.datetime: + start_date_parsed = self._start_date_from_config + if state_value: + return max(start_date_parsed, self._date_time_based_cursor.parse_date(state_value)) + else: + return start_date_parsed diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py index baf794747a0b..495d08db65f0 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/incremental/datetime_based_cursor.py @@ -87,7 +87,7 @@ def __post_init__(self, parameters: Mapping[str, Any]) -> None: else datetime.timedelta.max ) self._cursor_granularity = self._parse_timedelta(self.cursor_granularity) - self._cursor_field = InterpolatedString.create(self.cursor_field, parameters=parameters) + self.cursor_field = InterpolatedString.create(self.cursor_field, parameters=parameters) self._lookback_window = InterpolatedString.create(self.lookback_window, parameters=parameters) if self.lookback_window else None self._partition_field_start = InterpolatedString.create(self.partition_field_start or "start_time", parameters=parameters) self._partition_field_end = InterpolatedString.create(self.partition_field_end or "end_time", parameters=parameters) @@ -103,7 +103,7 @@ def __post_init__(self, parameters: Mapping[str, Any]) -> None: self.cursor_datetime_formats = [self.datetime_format] def get_stream_state(self) -> StreamState: - return {self._cursor_field.eval(self.config): self._cursor} if self._cursor else {} + return {self.cursor_field.eval(self.config): self._cursor} if self._cursor else {} # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__ def set_initial_state(self, stream_state: StreamState) -> None: """ @@ -112,7 +112,7 @@ def set_initial_state(self, stream_state: StreamState) -> None: :param stream_state: The state of the stream as returned by get_stream_state """ - self._cursor = stream_state.get(self._cursor_field.eval(self.config)) if stream_state else None + self._cursor = stream_state.get(self.cursor_field.eval(self.config)) if stream_state else None # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__ def observe(self, stream_slice: StreamSlice, record: Record) -> None: """ @@ -122,7 +122,7 @@ def observe(self, stream_slice: StreamSlice, record: Record) -> None: :param record: the most recently-read record, which the cursor can use to update the stream state. Outwardly-visible changes to the stream state may need to be deferred depending on whether the source reliably orders records by the cursor field. 
""" - record_cursor_value = record.get(self._cursor_field.eval(self.config)) + record_cursor_value = record.get(self.cursor_field.eval(self.config)) # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__ # if the current record has no cursor value, we cannot meaningfully update the state based on it, so there is nothing more to do if not record_cursor_value: return @@ -186,8 +186,8 @@ def _select_best_end_datetime(self) -> datetime.datetime: return min(self._end_datetime.get_datetime(self.config), now) def _calculate_cursor_datetime_from_state(self, stream_state: Mapping[str, Any]) -> datetime.datetime: - if self._cursor_field.eval(self.config, stream_state=stream_state) in stream_state: - return self.parse_date(stream_state[self._cursor_field.eval(self.config)]) + if self.cursor_field.eval(self.config, stream_state=stream_state) in stream_state: # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__ + return self.parse_date(stream_state[self.cursor_field.eval(self.config)]) # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__ return datetime.datetime.min.replace(tzinfo=datetime.timezone.utc) def _format_datetime(self, dt: datetime.datetime) -> str: @@ -300,7 +300,7 @@ def _get_request_options(self, option_type: RequestOptionType, stream_slice: Opt return options def should_be_synced(self, record: Record) -> bool: - cursor_field = self._cursor_field.eval(self.config) + cursor_field = self.cursor_field.eval(self.config) # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__ record_cursor_value = record.get(cursor_field) if not record_cursor_value: self._send_log( @@ -315,7 +315,7 @@ def should_be_synced(self, record: Record) -> bool: def _is_within_daterange_boundaries( self, record: Record, start_datetime_boundary: Union[datetime.datetime, str], end_datetime_boundary: Union[datetime.datetime, str] ) -> bool: - cursor_field = self._cursor_field.eval(self.config) + cursor_field = self.cursor_field.eval(self.config) # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__ record_cursor_value = record.get(cursor_field) if not record_cursor_value: self._send_log( @@ -339,7 +339,7 @@ def _send_log(self, level: Level, message: str) -> None: ) def is_greater_than_or_equal(self, first: Record, second: Record) -> bool: - cursor_field = self._cursor_field.eval(self.config) + cursor_field = self.cursor_field.eval(self.config) # type: ignore # cursor_field is converted to an InterpolatedString in __post_init__ first_cursor_value = first.get(cursor_field) second_cursor_value = second.get(cursor_field) if first_cursor_value and second_cursor_value: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py index 5bff43aef9a5..c0c51145ad04 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/models/declarative_component_schema.py @@ -937,6 +937,11 @@ class DatetimeBasedCursor(BaseModel): description='A data feed API is an API that does not allow filtering and paginates the content from the most recent to the least recent. 
Given this, the CDK needs to know when to stop paginating and this field will generate a stop condition for pagination.', title='Whether the target API is formatted as a data feed', ) + is_client_side_incremental: Optional[bool] = Field( + None, + description='If the target API endpoint does not take cursor values to filter records and returns all records anyway, the connector with this cursor will filter out records locally, and only emit new records from the last sync, hence incremental. This means that all records would be read from the API, but only new records will be emitted to the destination.', + title='Whether the target API does not support filtering and returns all data (the cursor filters records in the client instead of the API side)', + ) lookback_window: Optional[str] = Field( None, description='Time interval before the start_datetime to read data for, e.g. P1M for looking back one month.', diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py index 5940673de590..6b568321c308 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/parsers/model_to_component_factory.py @@ -7,7 +7,7 @@ import importlib import inspect import re -from typing import Any, Callable, List, Mapping, Optional, Type, Union, get_args, get_origin, get_type_hints +from typing import Any, Callable, Dict, List, Mapping, Optional, Type, Union, get_args, get_origin, get_type_hints from airbyte_cdk.models import Level from airbyte_cdk.sources.declarative.auth import DeclarativeOauth2Authenticator, JwtAuthenticator @@ -27,6 +27,7 @@ from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream from airbyte_cdk.sources.declarative.decoders import JsonDecoder from airbyte_cdk.sources.declarative.extractors import DpathExtractor, RecordFilter, RecordSelector +from airbyte_cdk.sources.declarative.extractors.record_filter import ClientSideIncrementalRecordFilterDecorator from airbyte_cdk.sources.declarative.extractors.record_selector import SCHEMA_TRANSFORMER_TYPE_MAPPING from airbyte_cdk.sources.declarative.incremental import ( CursorFactory, @@ -558,6 +559,8 @@ def create_datetime_based_cursor(self, model: DatetimeBasedCursorModel, config: end_datetime: Union[str, MinMaxDatetime, None] = None if model.is_data_feed and model.end_datetime: raise ValueError("Data feed does not support end_datetime") + if model.is_data_feed and model.is_client_side_incremental: + raise ValueError("`Client side incremental` cannot be applied with `data feed`. Choose only 1 from them.") if model.end_datetime: end_datetime = ( model.end_datetime if isinstance(model.end_datetime, str) else self.create_min_max_datetime(model.end_datetime, config) @@ -611,6 +614,18 @@ def create_declarative_stream(self, model: DeclarativeStreamModel, config: Confi stop_condition_on_cursor = ( model.incremental_sync and hasattr(model.incremental_sync, "is_data_feed") and model.incremental_sync.is_data_feed ) + client_side_incremental_sync = None + if ( + model.incremental_sync + and hasattr(model.incremental_sync, "is_client_side_incremental") + and model.incremental_sync.is_client_side_incremental + ): + if combined_slicers and not isinstance(combined_slicers, (DatetimeBasedCursor, PerPartitionCursor)): + raise ValueError("Unsupported Slicer is used. 
PerPartitionCursor should be used here instead") + client_side_incremental_sync = { + "date_time_based_cursor": self._create_component_from_model(model=model.incremental_sync, config=config), + "per_partition_cursor": combined_slicers if isinstance(combined_slicers, PerPartitionCursor) else None, + } transformations = [] if model.transformations: for transformation_model in model.transformations: @@ -622,6 +637,7 @@ def create_declarative_stream(self, model: DeclarativeStreamModel, config: Confi primary_key=primary_key, stream_slicer=combined_slicers, stop_condition_on_cursor=stop_condition_on_cursor, + client_side_incremental_sync=client_side_incremental_sync, transformations=transformations, ) cursor_field = model.incremental_sync.cursor_field if model.incremental_sync else None @@ -982,11 +998,19 @@ def create_record_selector( config: Config, *, transformations: List[RecordTransformation], + client_side_incremental_sync: Optional[Dict[str, Any]] = None, **kwargs: Any, ) -> RecordSelector: assert model.schema_normalization is not None # for mypy extractor = self._create_component_from_model(model=model.extractor, config=config) record_filter = self._create_component_from_model(model.record_filter, config=config) if model.record_filter else None + if client_side_incremental_sync: + record_filter = ClientSideIncrementalRecordFilterDecorator( + config=config, + parameters=model.parameters, + condition=model.record_filter.condition if model.record_filter else None, + **client_side_incremental_sync, + ) schema_normalization = TypeTransformer(SCHEMA_TRANSFORMER_TYPE_MAPPING[model.schema_normalization]) return RecordSelector( @@ -1038,10 +1062,16 @@ def create_simple_retriever( primary_key: Optional[Union[str, List[str], List[List[str]]]], stream_slicer: Optional[StreamSlicer], stop_condition_on_cursor: bool = False, + client_side_incremental_sync: Optional[Dict[str, Any]] = None, transformations: List[RecordTransformation], ) -> SimpleRetriever: requester = self._create_component_from_model(model=model.requester, config=config, name=name) - record_selector = self._create_component_from_model(model=model.record_selector, config=config, transformations=transformations) + record_selector = self._create_component_from_model( + model=model.record_selector, + config=config, + transformations=transformations, + client_side_incremental_sync=client_side_incremental_sync, + ) url_base = model.requester.url_base if hasattr(model.requester, "url_base") else requester.get_url_base() stream_slicer = stream_slicer or SinglePartitionRouter(parameters={}) cursor = stream_slicer if isinstance(stream_slicer, DeclarativeCursor) else None diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py index 7a43d5ff3495..ad293d2dac0d 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/partition_routers/substream_partition_router.py @@ -5,7 +5,7 @@ from dataclasses import InitVar, dataclass from typing import TYPE_CHECKING, Any, Iterable, List, Mapping, Optional, Union -import dpath.util +import dpath from airbyte_cdk.models import AirbyteMessage, SyncMode, Type from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString from airbyte_cdk.sources.declarative.requesters.request_option import RequestOption, 
RequestOptionType @@ -145,7 +145,7 @@ def stream_slices(self) -> Iterable[StreamSlice]: elif isinstance(parent_record, Record): parent_record = parent_record.data try: - partition_value = dpath.util.get(parent_record, parent_field) + partition_value = dpath.get(parent_record, parent_field) except KeyError: pass else: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py index d92de18120d7..f8ad541ec7c5 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/default_paginator.py @@ -2,7 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from dataclasses import InitVar, dataclass +from dataclasses import InitVar, dataclass, field from typing import Any, Mapping, MutableMapping, Optional, Union import requests @@ -90,7 +90,7 @@ class DefaultPaginator(Paginator): config: Config url_base: Union[InterpolatedString, str] parameters: InitVar[Mapping[str, Any]] - decoder: Decoder = JsonDecoder(parameters={}) + decoder: Decoder = field(default_factory=lambda: JsonDecoder(parameters={})) page_size_option: Optional[RequestOption] = None page_token_option: Optional[Union[RequestPath, RequestOption]] = None diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_pagination_strategy.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_pagination_strategy.py index fe576982b2a9..d4bcc57f4070 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_pagination_strategy.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/cursor_pagination_strategy.py @@ -2,7 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from dataclasses import InitVar, dataclass +from dataclasses import InitVar, dataclass, field from typing import Any, Dict, Mapping, Optional, Union import requests @@ -32,7 +32,7 @@ class CursorPaginationStrategy(PaginationStrategy): parameters: InitVar[Mapping[str, Any]] page_size: Optional[int] = None stop_condition: Optional[Union[InterpolatedBoolean, str]] = None - decoder: Decoder = JsonDecoder(parameters={}) + decoder: Decoder = field(default_factory=lambda: JsonDecoder(parameters={})) def __post_init__(self, parameters: Mapping[str, Any]) -> None: self._initial_cursor = None diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/offset_increment.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/offset_increment.py index 851d060b8f84..6b5f22176594 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/offset_increment.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/requesters/paginators/strategies/offset_increment.py @@ -2,7 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# -from dataclasses import InitVar, dataclass +from dataclasses import InitVar, dataclass, field from typing import Any, Mapping, Optional, Union import requests @@ -39,7 +39,7 @@ class OffsetIncrement(PaginationStrategy): config: Config page_size: Optional[Union[str, int]] parameters: InitVar[Mapping[str, Any]] - decoder: Decoder = JsonDecoder(parameters={}) + decoder: Decoder = field(default_factory=lambda: JsonDecoder(parameters={})) inject_on_first_request: bool = False def __post_init__(self, parameters: Mapping[str, Any]) -> None: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py index abc14f1cd827..d8ada5f500ab 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/retrievers/simple_retriever.py @@ -60,7 +60,7 @@ class SimpleRetriever(Retriever): primary_key: Optional[Union[str, List[str], List[List[str]]]] _primary_key: str = field(init=False, repr=False, default="") paginator: Optional[Paginator] = None - stream_slicer: StreamSlicer = SinglePartitionRouter(parameters={}) + stream_slicer: StreamSlicer = field(default_factory=lambda: SinglePartitionRouter(parameters={})) cursor: Optional[DeclarativeCursor] = None ignore_stream_slicer_parameters_on_paginated_requests: bool = False diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/add_fields.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/add_fields.py index 07ad6b2f0265..67290d2a5d95 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/add_fields.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/add_fields.py @@ -5,7 +5,7 @@ from dataclasses import InitVar, dataclass, field from typing import Any, List, Mapping, Optional, Type, Union -import dpath.util +import dpath from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString from airbyte_cdk.sources.declarative.transformations import RecordTransformation from airbyte_cdk.sources.types import Config, FieldPointer, Record, StreamSlice, StreamState @@ -122,7 +122,7 @@ def transform( for parsed_field in self._parsed_fields: valid_types = (parsed_field.value_type,) if parsed_field.value_type else None value = parsed_field.value.eval(config, valid_types=valid_types, **kwargs) - dpath.util.new(record, parsed_field.path, value) + dpath.new(record, parsed_field.path, value) return record diff --git a/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/remove_fields.py b/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/remove_fields.py index 327f006e95be..1d4edfc39367 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/remove_fields.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/declarative/transformations/remove_fields.py @@ -5,8 +5,8 @@ from dataclasses import InitVar, dataclass from typing import Any, List, Mapping, Optional +import dpath import dpath.exceptions -import dpath.util from airbyte_cdk.sources.declarative.interpolation.interpolated_boolean import InterpolatedBoolean from airbyte_cdk.sources.declarative.transformations import RecordTransformation from airbyte_cdk.sources.types import Config, FieldPointer, StreamSlice, StreamState @@ -60,7 +60,7 @@ def transform( for pointer in self.field_pointers: # the dpath library by default doesn't delete fields from 
arrays try: - dpath.util.delete( + dpath.delete( record, pointer, afilter=(lambda x: self._filter_interpolator.eval(config or {}, property=x)) if self.condition else None, diff --git a/airbyte-cdk/python/airbyte_cdk/sources/embedded/tools.py b/airbyte-cdk/python/airbyte_cdk/sources/embedded/tools.py index 5777e567dd4c..39d70c118cd0 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/embedded/tools.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/embedded/tools.py @@ -18,7 +18,7 @@ def get_defined_id(stream: AirbyteStream, data: Dict[str, Any]) -> Optional[str] primary_key = [] for key in stream.source_defined_primary_key: try: - primary_key.append(str(dpath.util.get(data, key))) + primary_key.append(str(dpath.get(data, key))) except KeyError: primary_key.append("__not_found__") return "_".join(primary_key) diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/config/abstract_file_based_spec.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/config/abstract_file_based_spec.py index b667343add80..7a82debc069c 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/config/abstract_file_based_spec.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/config/abstract_file_based_spec.py @@ -6,7 +6,7 @@ from abc import abstractmethod from typing import Any, Dict, List, Optional -import dpath.util +import dpath from airbyte_cdk.sources.file_based.config.file_based_stream_config import FileBasedStreamConfig from airbyte_cdk.sources.utils import schema_helpers from pydantic import AnyUrl, BaseModel, Field @@ -57,7 +57,7 @@ def schema(cls, *args: Any, **kwargs: Any) -> Dict[str, Any]: @staticmethod def remove_discriminator(schema: Dict[str, Any]) -> None: """pydantic adds "discriminator" to the schema for oneOfs, which is not treated right by the platform as we inline all references""" - dpath.util.delete(schema, "properties/**/discriminator") + dpath.delete(schema, "properties/**/discriminator") @staticmethod def replace_enum_allOf_and_anyOf(schema: Dict[str, Any]) -> Dict[str, Any]: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/unstructured_parser.py b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/unstructured_parser.py index 7c117b208672..659fbd2c4734 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/unstructured_parser.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/file_based/file_types/unstructured_parser.py @@ -8,7 +8,7 @@ from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union import backoff -import dpath.util +import dpath import requests from airbyte_cdk.models import FailureType from airbyte_cdk.sources.file_based.config.file_based_stream_config import FileBasedStreamConfig @@ -335,15 +335,15 @@ def _render_markdown(self, elements: List[Any]) -> str: return "\n\n".join((self._convert_to_markdown(el) for el in elements)) def _convert_to_markdown(self, el: Dict[str, Any]) -> str: - if dpath.util.get(el, "type") == "Title": - heading_str = "#" * (dpath.util.get(el, "metadata/category_depth", default=1) or 1) - return f"{heading_str} {dpath.util.get(el, 'text')}" - elif dpath.util.get(el, "type") == "ListItem": - return f"- {dpath.util.get(el, 'text')}" - elif dpath.util.get(el, "type") == "Formula": - return f"```\n{dpath.util.get(el, 'text')}\n```" + if dpath.get(el, "type") == "Title": + heading_str = "#" * (dpath.get(el, "metadata/category_depth", default=1) or 1) + return f"{heading_str} {dpath.get(el, 'text')}" + elif dpath.get(el, "type") == "ListItem": + return f"- 
{dpath.get(el, 'text')}" + elif dpath.get(el, "type") == "Formula": + return f"```\n{dpath.get(el, 'text')}\n```" else: - return str(dpath.util.get(el, "text", default="")) + return str(dpath.get(el, "text", default="")) @property def file_read_mode(self) -> FileReadMode: diff --git a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py index 48a855fa515f..1728f4099797 100644 --- a/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py +++ b/airbyte-cdk/python/airbyte_cdk/sources/streams/http/requests_native_auth/oauth.py @@ -153,10 +153,8 @@ def __init__( token_expiry_is_time_of_expiration bool: set True it if expires_in is returned as time of expiration instead of the number seconds until expiration message_repository (MessageRepository): the message repository used to emit logs on HTTP requests and control message on config update """ - self._client_id = client_id if client_id is not None else dpath.util.get(connector_config, ("credentials", "client_id")) - self._client_secret = ( - client_secret if client_secret is not None else dpath.util.get(connector_config, ("credentials", "client_secret")) - ) + self._client_id = client_id if client_id is not None else dpath.get(connector_config, ("credentials", "client_id")) + self._client_secret = client_secret if client_secret is not None else dpath.get(connector_config, ("credentials", "client_secret")) self._access_token_config_path = access_token_config_path self._refresh_token_config_path = refresh_token_config_path self._token_expiry_date_config_path = token_expiry_date_config_path @@ -193,24 +191,24 @@ def get_client_secret(self) -> str: @property def access_token(self) -> str: - return dpath.util.get(self._connector_config, self._access_token_config_path, default="") + return dpath.get(self._connector_config, self._access_token_config_path, default="") @access_token.setter def access_token(self, new_access_token: str): - dpath.util.new(self._connector_config, self._access_token_config_path, new_access_token) + dpath.new(self._connector_config, self._access_token_config_path, new_access_token) def get_refresh_token(self) -> str: - return dpath.util.get(self._connector_config, self._refresh_token_config_path, default="") + return dpath.get(self._connector_config, self._refresh_token_config_path, default="") def set_refresh_token(self, new_refresh_token: str): - dpath.util.new(self._connector_config, self._refresh_token_config_path, new_refresh_token) + dpath.new(self._connector_config, self._refresh_token_config_path, new_refresh_token) def get_token_expiry_date(self) -> pendulum.DateTime: - expiry_date = dpath.util.get(self._connector_config, self._token_expiry_date_config_path, default="") + expiry_date = dpath.get(self._connector_config, self._token_expiry_date_config_path, default="") return pendulum.now().subtract(days=1) if expiry_date == "" else pendulum.parse(expiry_date) def set_token_expiry_date(self, new_token_expiry_date): - dpath.util.new(self._connector_config, self._token_expiry_date_config_path, str(new_token_expiry_date)) + dpath.new(self._connector_config, self._token_expiry_date_config_path, str(new_token_expiry_date)) def token_has_expired(self) -> bool: """Returns True if the token is expired""" diff --git a/airbyte-cdk/python/airbyte_cdk/utils/airbyte_secrets_utils.py b/airbyte-cdk/python/airbyte_cdk/utils/airbyte_secrets_utils.py index e690a556606b..5afd305f38ed 100644 --- 
a/airbyte-cdk/python/airbyte_cdk/utils/airbyte_secrets_utils.py +++ b/airbyte-cdk/python/airbyte_cdk/utils/airbyte_secrets_utils.py @@ -4,7 +4,7 @@ from typing import Any, List, Mapping -import dpath.util +import dpath def get_secret_paths(spec: Mapping[str, Any]) -> List[List[str]]: @@ -45,7 +45,7 @@ def get_secrets(connection_specification: Mapping[str, Any], config: Mapping[str result = [] for path in secret_paths: try: - result.append(dpath.util.get(config, path)) + result.append(dpath.get(config, path)) except KeyError: # Since we try to get paths to all known secrets in the spec, in the case of oneOfs, some secret fields may not be present # In that case, a KeyError is thrown. This is expected behavior. diff --git a/airbyte-cdk/python/pyproject.toml b/airbyte-cdk/python/pyproject.toml index 1b94c65f9734..3d9477542f61 100644 --- a/airbyte-cdk/python/pyproject.toml +++ b/airbyte-cdk/python/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "airbyte-cdk" -version = "1.1.3" +version = "1.2.1" description = "A framework for writing Airbyte Connectors." authors = ["Airbyte "] license = "MIT" diff --git a/airbyte-cdk/python/unit_tests/__init__.py b/airbyte-cdk/python/unit_tests/__init__.py index b6b74b56a60d..51e56f3ad0e1 100644 --- a/airbyte-cdk/python/unit_tests/__init__.py +++ b/airbyte-cdk/python/unit_tests/__init__.py @@ -2,5 +2,6 @@ # Import the thing that needs to be imported to stop the tests from falling over from airbyte_cdk.sources.declarative.manifest_declarative_source import ManifestDeclarativeSource + # "Use" the thing so that the linter doesn't complain placeholder = ManifestDeclarativeSource diff --git a/airbyte-cdk/python/unit_tests/destinations/vector_db_based/config_test.py b/airbyte-cdk/python/unit_tests/destinations/vector_db_based/config_test.py index c6ccf6da1985..71f2d15a038a 100644 --- a/airbyte-cdk/python/unit_tests/destinations/vector_db_based/config_test.py +++ b/airbyte-cdk/python/unit_tests/destinations/vector_db_based/config_test.py @@ -4,7 +4,7 @@ from typing import Union -import dpath.util +import dpath from airbyte_cdk.destinations.vector_db_based.config import ( AzureOpenAIEmbeddingConfigModel, CohereEmbeddingConfigModel, @@ -57,7 +57,7 @@ class Config: @staticmethod def remove_discriminator(schema: dict) -> None: """pydantic adds "discriminator" to the schema for oneOfs, which is not treated right by the platform as we inline all references""" - dpath.util.delete(schema, "properties/**/discriminator") + dpath.delete(schema, "properties/**/discriminator") @classmethod def schema(cls): diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_filter.py b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_filter.py index 2104e4243120..1ce4d249ac69 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_filter.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/extractors/test_record_filter.py @@ -1,47 +1,50 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# +from typing import List, Mapping, Optional import pytest -from airbyte_cdk.sources.declarative.extractors.record_filter import RecordFilter +from airbyte_cdk.sources.declarative.datetime import MinMaxDatetime +from airbyte_cdk.sources.declarative.extractors.record_filter import ClientSideIncrementalRecordFilterDecorator, RecordFilter +from airbyte_cdk.sources.declarative.incremental import CursorFactory, DatetimeBasedCursor, PerPartitionCursor +from airbyte_cdk.sources.declarative.interpolation import InterpolatedString +from airbyte_cdk.sources.declarative.models import CustomRetriever, DeclarativeStream, ParentStreamConfig, SubstreamPartitionRouter +from airbyte_cdk.sources.declarative.types import StreamSlice @pytest.mark.parametrize( - "test_name, filter_template, records, expected_records", + "filter_template, records, expected_records", [ ( - "test_using_state_filter", - "{{ record['created_at'] > stream_state['created_at'] }}", - [{"id": 1, "created_at": "06-06-21"}, {"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], - [{"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], + "{{ record['created_at'] > stream_state['created_at'] }}", + [{"id": 1, "created_at": "06-06-21"}, {"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], + [{"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], ), ( - "test_with_slice_filter", - "{{ record['last_seen'] >= stream_slice['last_seen'] }}", - [{"id": 1, "last_seen": "06-06-21"}, {"id": 2, "last_seen": "06-07-21"}, {"id": 3, "last_seen": "06-10-21"}], - [{"id": 3, "last_seen": "06-10-21"}], + "{{ record['last_seen'] >= stream_slice['last_seen'] }}", + [{"id": 1, "last_seen": "06-06-21"}, {"id": 2, "last_seen": "06-07-21"}, {"id": 3, "last_seen": "06-10-21"}], + [{"id": 3, "last_seen": "06-10-21"}], ), ( - "test_with_next_page_token_filter", - "{{ record['id'] >= next_page_token['last_seen_id'] }}", - [{"id": 11}, {"id": 12}, {"id": 13}, {"id": 14}, {"id": 15}], - [{"id": 14}, {"id": 15}], + "{{ record['id'] >= next_page_token['last_seen_id'] }}", + [{"id": 11}, {"id": 12}, {"id": 13}, {"id": 14}, {"id": 15}], + [{"id": 14}, {"id": 15}], ), ( - "test_missing_filter_fields_return_no_results", - "{{ record['id'] >= next_page_token['path_to_nowhere'] }}", - [{"id": 11}, {"id": 12}, {"id": 13}, {"id": 14}, {"id": 15}], - [], + "{{ record['id'] >= next_page_token['path_to_nowhere'] }}", + [{"id": 11}, {"id": 12}, {"id": 13}, {"id": 14}, {"id": 15}], + [], ), ( - "test_using_parameters_filter", - "{{ record['created_at'] > parameters['created_at'] }}", - [{"id": 1, "created_at": "06-06-21"}, {"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], - [{"id": 3, "created_at": "06-08-21"}], + "{{ record['created_at'] > parameters['created_at'] }}", + [{"id": 1, "created_at": "06-06-21"}, {"id": 2, "created_at": "06-07-21"}, {"id": 3, "created_at": "06-08-21"}], + [{"id": 3, "created_at": "06-08-21"}], ), ], + ids=["test_using_state_filter", "test_with_slice_filter", "test_with_next_page_token_filter", + "test_missing_filter_fields_return_no_results", "test_using_parameters_filter", ] ) -def test_record_filter(test_name, filter_template, records, expected_records): +def test_record_filter(filter_template: str, records: List[Mapping], expected_records: List[Mapping]): config = {"response_override": "stop_if_you_see_me"} parameters = {"created_at": "06-07-21"} stream_state = {"created_at": "06-06-21"} @@ -53,3 +56,109 @@ def test_record_filter(test_name, 
filter_template, records, expected_records): records, stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token )) assert actual_records == expected_records + + +@pytest.mark.parametrize( + "stream_state, record_filter_expression, expected_record_ids", + [ + ({}, None, [2, 3]), + ({"created_at": "2021-01-03"}, None, [3]), + ({}, "{{ record['id'] % 2 == 1 }}", [3]), + ], + ids=["no_stream_state_no_record_filter", "with_stream_state_no_record_filter", "no_stream_state_with_record_filter"] +) +def test_client_side_record_filter_decorator_no_parent_stream(stream_state: Optional[Mapping], record_filter_expression: str, + expected_record_ids: List[int]): + records_to_filter = [ + {"id": 1, "created_at": "2020-01-03"}, + {"id": 2, "created_at": "2021-01-03"}, + {"id": 3, "created_at": "2021-01-04"}, + {"id": 4, "created_at": "2021-02-01"}, + ] + date_time_based_cursor = DatetimeBasedCursor( + start_datetime=MinMaxDatetime(datetime="2021-01-01", datetime_format="%Y-%m-%d", parameters={}), + end_datetime=MinMaxDatetime(datetime="2021-01-05", datetime_format="%Y-%m-%d", parameters={}), + step="P10Y", + cursor_field=InterpolatedString.create("created_at", parameters={}), + datetime_format="%Y-%m-%d", + cursor_granularity="P1D", + config={}, + parameters={}, + ) + + record_filter_decorator = ClientSideIncrementalRecordFilterDecorator( + config={}, + condition=record_filter_expression, + parameters={}, + date_time_based_cursor=date_time_based_cursor, + per_partition_cursor=None + ) + + filtered_records = list( + record_filter_decorator.filter_records(records=records_to_filter, stream_state=stream_state, stream_slice={}, next_page_token=None) + ) + + assert [x.get("id") for x in filtered_records] == expected_record_ids + + +@pytest.mark.parametrize( + "stream_state, expected_record_ids", + [ + ({}, [2, 3]), + ({"states": [{"some_parent_id": {"created_at": "2021-01-03"}}]}, [3]), + ], + ids=["no_stream_state_no_record_filter", "with_stream_state_no_record_filter"] +) +def test_client_side_record_filter_decorator_with_parent_stream(stream_state: Optional[Mapping], expected_record_ids: List[int]): + records_to_filter = [ + {"id": 1, "created_at": "2020-01-03"}, + {"id": 2, "created_at": "2021-01-03"}, + {"id": 3, "created_at": "2021-01-04"}, + {"id": 4, "created_at": "2021-02-01"}, + ] + date_time_based_cursor = DatetimeBasedCursor( + start_datetime=MinMaxDatetime(datetime="2021-01-01", datetime_format="%Y-%m-%d", parameters={}), + end_datetime=MinMaxDatetime(datetime="2021-01-05", datetime_format="%Y-%m-%d", parameters={}), + step="P10Y", + cursor_field=InterpolatedString.create("created_at", parameters={}), + datetime_format="%Y-%m-%d", + cursor_granularity="P1D", + config={}, + parameters={}, + ) + per_partition_cursor = PerPartitionCursor( + cursor_factory=CursorFactory( + lambda: date_time_based_cursor), + partition_router=SubstreamPartitionRouter( + type="SubstreamPartitionRouter", + parent_stream_configs=[ + ParentStreamConfig( + type="ParentStreamConfig", + parent_key="id", + partition_field="id", + stream=DeclarativeStream( + type="DeclarativeStream", + retriever=CustomRetriever( + type="CustomRetriever", + class_name="a_class_name" + ) + ) + ) + ] + ), + ) + if stream_state: + per_partition_cursor.set_initial_state({"states": [{"partition": {"id": "some_parent_id", "parent_slice": {}}, "cursor": {'created_at': '2021-01-03'}}]}) + record_filter_decorator = ClientSideIncrementalRecordFilterDecorator( + config={}, + parameters={}, + 
date_time_based_cursor=date_time_based_cursor, + per_partition_cursor=per_partition_cursor + ) + filtered_records = list( + record_filter_decorator.filter_records(records=records_to_filter, stream_state=stream_state, + stream_slice=StreamSlice(partition={"id": "some_parent_id", "parent_slice": {}}, cursor_slice={}), + next_page_token=None) + ) + + assert [x.get("id") for x in filtered_records] == expected_record_ids diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_nested_mapping.py b/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_nested_mapping.py index cb0476c7a3ca..c1dea1d62fb2 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_nested_mapping.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/interpolation/test_interpolated_nested_mapping.py @@ -2,7 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -import dpath.util +import dpath import pytest from airbyte_cdk.sources.declarative.interpolation.interpolated_nested_mapping import InterpolatedNestedMapping @@ -42,4 +42,4 @@ def test(test_name, path, expected_value): interpolated = mapping.eval(config, **{"kwargs": kwargs}) - assert dpath.util.get(interpolated, path) == expected_value + assert dpath.get(interpolated, path) == expected_value diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py index fba43c12ea2c..44f39eaccfc9 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/test_model_to_component_factory.py @@ -22,6 +22,7 @@ from airbyte_cdk.sources.declarative.declarative_stream import DeclarativeStream from airbyte_cdk.sources.declarative.decoders import JsonDecoder from airbyte_cdk.sources.declarative.extractors import DpathExtractor, RecordFilter, RecordSelector +from airbyte_cdk.sources.declarative.extractors.record_filter import ClientSideIncrementalRecordFilterDecorator from airbyte_cdk.sources.declarative.incremental import DatetimeBasedCursor, PerPartitionCursor, ResumableFullRefreshCursor from airbyte_cdk.sources.declarative.interpolation import InterpolatedString from airbyte_cdk.sources.declarative.models import CheckStream as CheckStreamModel @@ -535,7 +536,7 @@ def test_datetime_based_cursor(): assert isinstance(stream_slicer, DatetimeBasedCursor) assert stream_slicer._step == datetime.timedelta(days=10) - assert stream_slicer._cursor_field.string == "created" + assert stream_slicer.cursor_field.string == "created" assert stream_slicer.cursor_granularity == "PT0.000001S" assert stream_slicer._lookback_window.string == "P5D" assert stream_slicer.start_time_option.inject_into == RequestOptionType.request_parameter @@ -651,7 +652,7 @@ def test_stream_with_incremental_and_retriever_with_partition_router(): assert isinstance(datetime_stream_slicer._end_datetime, MinMaxDatetime) assert datetime_stream_slicer._end_datetime.datetime.string == "{{ config['end_time'] }}" assert datetime_stream_slicer.step == "P10D" - assert datetime_stream_slicer._cursor_field.string == "created" + assert datetime_stream_slicer.cursor_field.string == "created" list_stream_slicer = stream.retriever.stream_slicer._partition_router assert isinstance(list_stream_slicer, ListPartitionRouter) @@ -862,6 +863,156 @@ def 
test_given_data_feed_and_incremental_then_raise_error(): ) +def test_client_side_incremental(): + content = """ +selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["extractor_path"] +requester: + type: HttpRequester + name: "{{ parameters['name'] }}" + url_base: "https://api.sendgrid.com/v3/" + http_method: "GET" +list_stream: + type: DeclarativeStream + incremental_sync: + type: DatetimeBasedCursor + $parameters: + datetime_format: "%Y-%m-%dT%H:%M:%S.%f%z" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.get('start_date', '1970-01-01T00:00:00.0Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + cursor_field: "created" + is_client_side_incremental: true + retriever: + type: SimpleRetriever + name: "{{ parameters['name'] }}" + paginator: + type: DefaultPaginator + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response._metadata.next }}" + page_size: 10 + requester: + $ref: "#/requester" + path: "/" + record_selector: + $ref: "#/selector" + $parameters: + name: "lists" + """ + + parsed_manifest = YamlDeclarativeSource._parse(content) + resolved_manifest = resolver.preprocess_manifest(parsed_manifest) + stream_manifest = transformer.propagate_types_and_parameters("", resolved_manifest["list_stream"], {}) + + stream = factory.create_component(model_type=DeclarativeStreamModel, component_definition=stream_manifest, config=input_config) + + assert isinstance(stream.retriever.record_selector.record_filter, ClientSideIncrementalRecordFilterDecorator) + + +def test_client_side_incremental_with_partition_router(): + content = """ +selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["extractor_path"] +requester: + type: HttpRequester + name: "{{ parameters['name'] }}" + url_base: "https://api.sendgrid.com/v3/" + http_method: "GET" +schema_loader: + file_path: "./source_sendgrid/schemas/{{ parameters['name'] }}.yaml" + name: "{{ parameters['stream_name'] }}" +retriever: + requester: + type: "HttpRequester" + path: "kek" + record_selector: + extractor: + field_path: [] +stream_A: + type: DeclarativeStream + name: "A" + primary_key: "id" + $parameters: + retriever: "#/retriever" + url_base: "https://airbyte.io" + schema_loader: "#/schema_loader" +list_stream: + type: DeclarativeStream + incremental_sync: + type: DatetimeBasedCursor + $parameters: + datetime_format: "%Y-%m-%dT%H:%M:%S.%f%z" + start_datetime: + type: MinMaxDatetime + datetime: "{{ config.get('start_date', '1970-01-01T00:00:00.0Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%S.%fZ" + cursor_field: "created" + is_client_side_incremental: true + retriever: + type: SimpleRetriever + name: "{{ parameters['name'] }}" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - stream: "#/stream_A" + parent_key: id + partition_field: id + paginator: + type: DefaultPaginator + pagination_strategy: + type: "CursorPagination" + cursor_value: "{{ response._metadata.next }}" + page_size: 10 + requester: + $ref: "#/requester" + path: "/" + record_selector: + $ref: "#/selector" + $parameters: + name: "lists" + """ + + parsed_manifest = YamlDeclarativeSource._parse(content) + resolved_manifest = resolver.preprocess_manifest(parsed_manifest) + stream_manifest = transformer.propagate_types_and_parameters("", resolved_manifest["list_stream"], {}) + + stream = factory.create_component(model_type=DeclarativeStreamModel, component_definition=stream_manifest, config=input_config) + + assert 
isinstance(stream.retriever.record_selector.record_filter, ClientSideIncrementalRecordFilterDecorator) + assert isinstance(stream.retriever.record_selector.record_filter._per_partition_cursor, PerPartitionCursor) + + +def test_given_data_feed_and_client_side_incremental_then_raise_error(): + content = """ +incremental_sync: + type: DatetimeBasedCursor + $parameters: + datetime_format: "%Y-%m-%dT%H:%M:%S.%f%z" + start_datetime: "{{ config['start_time'] }}" + cursor_field: "created" + is_data_feed: true + is_client_side_incremental: true + """ + + parsed_incremental_sync = YamlDeclarativeSource._parse(content) + resolved_incremental_sync = resolver.preprocess_manifest(parsed_incremental_sync) + datetime_based_cursor_definition = transformer.propagate_types_and_parameters("", resolved_incremental_sync["incremental_sync"], {}) + + with pytest.raises(ValueError) as e: + factory.create_component( + model_type=DatetimeBasedCursorModel, component_definition=datetime_based_cursor_definition, config=input_config + ) + assert e.value.args[0] == "`Client side incremental` cannot be applied with `data feed`. Choose only 1 from them." + + @pytest.mark.parametrize( "test_name, record_selector, expected_runtime_selector", [("test_static_record_selector", "result", "result"), ("test_options_record_selector", "{{ parameters['name'] }}", "lists")], diff --git a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/testing_components.py b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/testing_components.py index 9b8e33895460..db85283b7c0f 100644 --- a/airbyte-cdk/python/unit_tests/sources/declarative/parsers/testing_components.py +++ b/airbyte-cdk/python/unit_tests/sources/declarative/parsers/testing_components.py @@ -2,7 +2,7 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import List, Optional from airbyte_cdk.sources.declarative.extractors import DpathExtractor @@ -18,7 +18,7 @@ class TestingSomeComponent(DefaultErrorHandler): A basic test class with various field permutations used to test manifests with custom components """ - subcomponent_field_with_hint: DpathExtractor = DpathExtractor(field_path=[], config={}, parameters={}) + subcomponent_field_with_hint: DpathExtractor = field(default_factory=lambda: DpathExtractor(field_path=[], config={}, parameters={})) basic_field: str = "" optional_subcomponent_field: Optional[RequestOption] = None list_of_subcomponents: List[RequestOption] = None diff --git a/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/destination.py b/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/destination.py index 55b3279ebfc2..5dcc392ab57e 100644 --- a/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/destination.py +++ b/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/destination.py @@ -1,6 +1,4 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
import datetime import json @@ -13,10 +11,12 @@ import duckdb import pyarrow as pa + from airbyte_cdk import AirbyteLogger from airbyte_cdk.destinations import Destination from airbyte_cdk.models import AirbyteConnectionStatus, AirbyteMessage, ConfiguredAirbyteCatalog, DestinationSyncMode, Status, Type + logger = getLogger("airbyte") CONFIG_MOTHERDUCK_API_KEY = "motherduck_api_key" diff --git a/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/run.py b/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/run.py index 9eb783fd1908..95e32dee3d59 100644 --- a/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/run.py +++ b/airbyte-integrations/connectors/destination-duckdb/destination_duckdb/run.py @@ -1,6 +1,4 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. import sys diff --git a/airbyte-integrations/connectors/destination-duckdb/integration_tests/integration_test.py b/airbyte-integrations/connectors/destination-duckdb/integration_tests/integration_test.py index a456c6856040..d985da9706db 100644 --- a/airbyte-integrations/connectors/destination-duckdb/integration_tests/integration_test.py +++ b/airbyte-integrations/connectors/destination-duckdb/integration_tests/integration_test.py @@ -1,6 +1,4 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. from __future__ import annotations @@ -112,7 +110,7 @@ def table_schema() -> str: @pytest.fixture def configured_catalogue( - test_table_name: str, test_large_table_name: str, table_schema: str, + test_table_name: str, test_large_table_name: str, table_schema: str, ) -> ConfiguredAirbyteCatalog: append_stream = ConfiguredAirbyteStream( stream=AirbyteStream( @@ -261,17 +259,17 @@ def _airbyte_messages_with_inconsistent_json_fields(n: int, batch_size: int, tab record=AirbyteRecordMessage( stream=table_name, # Throw in empty nested objects and see how pyarrow deals with them. 
- data={"key1": fake.first_name() , - "key2": fake.ssn() if random.random()< 0.5 else random.randrange(1000,9999999999999), - "nested1": {} if random.random()< 0.1 else { - "key3": fake.first_name() , - "key4": fake.ssn() if random.random()< 0.5 else random.randrange(1000,9999999999999), - "dictionary1":{} if random.random()< 0.1 else { - "key3": fake.first_name() , + data={"key1": fake.first_name(), + "key2": fake.ssn() if random.random()< 0.5 else random.randrange(1000,9999999999999), + "nested1": {} if random.random()< 0.1 else { + "key3": fake.first_name(), + "key4": fake.ssn() if random.random()< 0.5 else random.randrange(1000,9999999999999), + "dictionary1":{} if random.random()< 0.1 else { + "key3": fake.first_name(), "key4": "True" if random.random() < 0.5 else True } } - } + } if random.random() < 0.9 else {}, emitted_at=int(datetime.now().timestamp()) * 1000, @@ -284,8 +282,8 @@ def _airbyte_messages_with_inconsistent_json_fields(n: int, batch_size: int, tab BATCH_WRITE_SIZE = 1000 @pytest.mark.slow -@pytest.mark.parametrize("airbyte_message_generator,explanation", - [(_airbyte_messages, "Test writing a large number of simple json objects."), +@pytest.mark.parametrize("airbyte_message_generator,explanation", + [(_airbyte_messages, "Test writing a large number of simple json objects."), (_airbyte_messages_with_inconsistent_json_fields, "Test writing a large number of json messages with inconsistent schema.")] ) def test_large_number_of_writes( config: Dict[str, str], diff --git a/airbyte-integrations/connectors/destination-duckdb/metadata.yaml b/airbyte-integrations/connectors/destination-duckdb/metadata.yaml index a9e7cb5495c3..cb98c014b849 100644 --- a/airbyte-integrations/connectors/destination-duckdb/metadata.yaml +++ b/airbyte-integrations/connectors/destination-duckdb/metadata.yaml @@ -4,7 +4,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 94bd199c-2ff0-4aa2-b98e-17f0acb72610 - dockerImageTag: 0.3.6 + dockerImageTag: 0.4.0 dockerRepository: airbyte/destination-duckdb githubIssueLabel: destination-duckdb icon: duckdb.svg @@ -15,16 +15,21 @@ data: enabled: true oss: enabled: true - releaseStage: alpha + releaseStage: beta releases: breakingChanges: 0.3.0: - message: - "This version uses the DuckDB 0.9.1 database driver, which is not - backwards compatible with prior versions. MotherDuck users can upgrade their - database by visiting https://app.motherduck.com/ and accepting the upgrade. - For more information, see the connector migration guide." + message: > + This version uses the DuckDB 0.9.1 database driver, which is not + backwards compatible with prior versions. MotherDuck users can + upgrade their database by visiting https://app.motherduck.com/ and + accepting the upgrade. For more information, see the connector + migration guide. upgradeDeadline: "2023-10-31" + 0.4.0: + message: > + This version uses the DuckDB 0.10.3 database driver, which in not backwards compatible with databases created using the 0.9.x versions of DuckDB. MotherDuck users can upgrade their database by visiting https://app.motherduck.com/ and accepting the upgrade. For more information, see the connector migration guide. 
+ upgradeDeadline: "2024-06-30" resourceRequirements: jobSpecific: - jobType: check_connection diff --git a/airbyte-integrations/connectors/destination-duckdb/poetry.lock b/airbyte-integrations/connectors/destination-duckdb/poetry.lock index 2034bb0b0467..72168715425e 100644 --- a/airbyte-integrations/connectors/destination-duckdb/poetry.lock +++ b/airbyte-integrations/connectors/destination-duckdb/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "airbyte-cdk" @@ -336,6 +336,70 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.5.3" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a6519d917abb15e12380406d721e37613e2a67d166f9fb7e5a8ce0375744cd45"}, + {file = "coverage-7.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aea7da970f1feccf48be7335f8b2ca64baf9b589d79e05b9397a06696ce1a1ec"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:923b7b1c717bd0f0f92d862d1ff51d9b2b55dbbd133e05680204465f454bb286"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62bda40da1e68898186f274f832ef3e759ce929da9a9fd9fcf265956de269dbc"}, + {file = "coverage-7.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8b7339180d00de83e930358223c617cc343dd08e1aa5ec7b06c3a121aec4e1d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:25a5caf742c6195e08002d3b6c2dd6947e50efc5fc2c2205f61ecb47592d2d83"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:05ac5f60faa0c704c0f7e6a5cbfd6f02101ed05e0aee4d2822637a9e672c998d"}, + {file = "coverage-7.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:239a4e75e09c2b12ea478d28815acf83334d32e722e7433471fbf641c606344c"}, + {file = "coverage-7.5.3-cp310-cp310-win32.whl", hash = "sha256:a5812840d1d00eafae6585aba38021f90a705a25b8216ec7f66aebe5b619fb84"}, + {file = "coverage-7.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:33ca90a0eb29225f195e30684ba4a6db05dbef03c2ccd50b9077714c48153cac"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81bc26d609bf0fbc622c7122ba6307993c83c795d2d6f6f6fd8c000a770d974"}, + {file = "coverage-7.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7cec2af81f9e7569280822be68bd57e51b86d42e59ea30d10ebdbb22d2cb7232"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55f689f846661e3f26efa535071775d0483388a1ccfab899df72924805e9e7cd"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50084d3516aa263791198913a17354bd1dc627d3c1639209640b9cac3fef5807"}, + {file = "coverage-7.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:341dd8f61c26337c37988345ca5c8ccabeff33093a26953a1ac72e7d0103c4fb"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab0b028165eea880af12f66086694768f2c3139b2c31ad5e032c8edbafca6ffc"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5bc5a8c87714b0c67cfeb4c7caa82b2d71e8864d1a46aa990b5588fa953673b8"}, + {file = "coverage-7.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:38a3b98dae8a7c9057bd91fbf3415c05e700a5114c5f1b5b0ea5f8f429ba6614"}, + {file = "coverage-7.5.3-cp311-cp311-win32.whl", hash = "sha256:fcf7d1d6f5da887ca04302db8e0e0cf56ce9a5e05f202720e49b3e8157ddb9a9"}, + {file = "coverage-7.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:8c836309931839cca658a78a888dab9676b5c988d0dd34ca247f5f3e679f4e7a"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:296a7d9bbc598e8744c00f7a6cecf1da9b30ae9ad51c566291ff1314e6cbbed8"}, + {file = "coverage-7.5.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:34d6d21d8795a97b14d503dcaf74226ae51eb1f2bd41015d3ef332a24d0a17b3"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e317953bb4c074c06c798a11dbdd2cf9979dbcaa8ccc0fa4701d80042d4ebf1"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:705f3d7c2b098c40f5b81790a5fedb274113373d4d1a69e65f8b68b0cc26f6db"}, + {file = "coverage-7.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1196e13c45e327d6cd0b6e471530a1882f1017eb83c6229fc613cd1a11b53cd"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:015eddc5ccd5364dcb902eaecf9515636806fa1e0d5bef5769d06d0f31b54523"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fd27d8b49e574e50caa65196d908f80e4dff64d7e592d0c59788b45aad7e8b35"}, + {file = "coverage-7.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:33fc65740267222fc02975c061eb7167185fef4cc8f2770267ee8bf7d6a42f84"}, + {file = "coverage-7.5.3-cp312-cp312-win32.whl", hash = "sha256:7b2a19e13dfb5c8e145c7a6ea959485ee8e2204699903c88c7d25283584bfc08"}, + {file = "coverage-7.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:0bbddc54bbacfc09b3edaec644d4ac90c08ee8ed4844b0f86227dcda2d428fcb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f78300789a708ac1f17e134593f577407d52d0417305435b134805c4fb135adb"}, + {file = "coverage-7.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b368e1aee1b9b75757942d44d7598dcd22a9dbb126affcbba82d15917f0cc155"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f836c174c3a7f639bded48ec913f348c4761cbf49de4a20a956d3431a7c9cb24"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:244f509f126dc71369393ce5fea17c0592c40ee44e607b6d855e9c4ac57aac98"}, + {file = "coverage-7.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4c2872b3c91f9baa836147ca33650dc5c172e9273c808c3c3199c75490e709d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd4b3355b01273a56b20c219e74e7549e14370b31a4ffe42706a8cda91f19f6d"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f542287b1489c7a860d43a7d8883e27ca62ab84ca53c965d11dac1d3a1fab7ce"}, + {file = "coverage-7.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:75e3f4e86804023e991096b29e147e635f5e2568f77883a1e6eed74512659ab0"}, + {file = 
"coverage-7.5.3-cp38-cp38-win32.whl", hash = "sha256:c59d2ad092dc0551d9f79d9d44d005c945ba95832a6798f98f9216ede3d5f485"}, + {file = "coverage-7.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:fa21a04112c59ad54f69d80e376f7f9d0f5f9123ab87ecd18fbb9ec3a2beed56"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5102a92855d518b0996eb197772f5ac2a527c0ec617124ad5242a3af5e25f85"}, + {file = "coverage-7.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d1da0a2e3b37b745a2b2a678a4c796462cf753aebf94edcc87dcc6b8641eae31"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8383a6c8cefba1b7cecc0149415046b6fc38836295bc4c84e820872eb5478b3d"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9aad68c3f2566dfae84bf46295a79e79d904e1c21ccfc66de88cd446f8686341"}, + {file = "coverage-7.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e079c9ec772fedbade9d7ebc36202a1d9ef7291bc9b3a024ca395c4d52853d7"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bde997cac85fcac227b27d4fb2c7608a2c5f6558469b0eb704c5726ae49e1c52"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:990fb20b32990b2ce2c5f974c3e738c9358b2735bc05075d50a6f36721b8f303"}, + {file = "coverage-7.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3d5a67f0da401e105753d474369ab034c7bae51a4c31c77d94030d59e41df5bd"}, + {file = "coverage-7.5.3-cp39-cp39-win32.whl", hash = "sha256:e08c470c2eb01977d221fd87495b44867a56d4d594f43739a8028f8646a51e0d"}, + {file = "coverage-7.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:1d2a830ade66d3563bb61d1e3c77c8def97b30ed91e166c67d0632c018f380f0"}, + {file = "coverage-7.5.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:3538d8fb1ee9bdd2e2692b3b18c22bb1c19ffbefd06880f5ac496e42d7bb3884"}, + {file = "coverage-7.5.3.tar.gz", hash = "sha256:04aefca5190d1dc7a53a4c1a5a7f8568811306d7a8ee231c42fb69215571944f"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "deprecated" version = "1.2.14" @@ -366,61 +430,69 @@ files = [ [[package]] name = "duckdb" -version = "0.9.2" -description = "DuckDB embedded database" +version = "0.10.3" +description = "DuckDB in-process database" optional = false python-versions = ">=3.7.0" files = [ - {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:aadcea5160c586704c03a8a796c06a8afffbefefb1986601104a60cb0bfdb5ab"}, - {file = "duckdb-0.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:08215f17147ed83cbec972175d9882387366de2ed36c21cbe4add04b39a5bcb4"}, - {file = "duckdb-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee6c2a8aba6850abef5e1be9dbc04b8e72a5b2c2b67f77892317a21fae868fe7"}, - {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff49f3da9399900fd58b5acd0bb8bfad22c5147584ad2427a78d937e11ec9d0"}, - {file = "duckdb-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5ac5baf8597efd2bfa75f984654afcabcd698342d59b0e265a0bc6f267b3f0"}, - {file = "duckdb-0.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:81c6df905589a1023a27e9712edb5b724566587ef280a0c66a7ec07c8083623b"}, - {file = "duckdb-0.9.2-cp310-cp310-win32.whl", hash = "sha256:a298cd1d821c81d0dec8a60878c4b38c1adea04a9675fb6306c8f9083bbf314d"}, - {file = "duckdb-0.9.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:492a69cd60b6cb4f671b51893884cdc5efc4c3b2eb76057a007d2a2295427173"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:061a9ea809811d6e3025c5de31bc40e0302cfb08c08feefa574a6491e882e7e8"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a43f93be768af39f604b7b9b48891f9177c9282a408051209101ff80f7450d8f"}, - {file = "duckdb-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac29c8c8f56fff5a681f7bf61711ccb9325c5329e64f23cb7ff31781d7b50773"}, - {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b14d98d26bab139114f62ade81350a5342f60a168d94b27ed2c706838f949eda"}, - {file = "duckdb-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:796a995299878913e765b28cc2b14c8e44fae2f54ab41a9ee668c18449f5f833"}, - {file = "duckdb-0.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6cb64ccfb72c11ec9c41b3cb6181b6fd33deccceda530e94e1c362af5f810ba1"}, - {file = "duckdb-0.9.2-cp311-cp311-win32.whl", hash = "sha256:930740cb7b2cd9e79946e1d3a8f66e15dc5849d4eaeff75c8788d0983b9256a5"}, - {file = "duckdb-0.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:c28f13c45006fd525001b2011cdf91fa216530e9751779651e66edc0e446be50"}, - {file = "duckdb-0.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fbce7bbcb4ba7d99fcec84cec08db40bc0dd9342c6c11930ce708817741faeeb"}, - {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15a82109a9e69b1891f0999749f9e3265f550032470f51432f944a37cfdc908b"}, - {file = "duckdb-0.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9490fb9a35eb74af40db5569d90df8a04a6f09ed9a8c9caa024998c40e2506aa"}, - {file = "duckdb-0.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:696d5c6dee86c1a491ea15b74aafe34ad2b62dcd46ad7e03b1d00111ca1a8c68"}, - {file = "duckdb-0.9.2-cp37-cp37m-win32.whl", hash = "sha256:4f0935300bdf8b7631ddfc838f36a858c1323696d8c8a2cecbd416bddf6b0631"}, - {file = "duckdb-0.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:0aab900f7510e4d2613263865570203ddfa2631858c7eb8cbed091af6ceb597f"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7d8130ed6a0c9421b135d0743705ea95b9a745852977717504e45722c112bf7a"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:974e5de0294f88a1a837378f1f83330395801e9246f4e88ed3bfc8ada65dcbee"}, - {file = "duckdb-0.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4fbc297b602ef17e579bb3190c94d19c5002422b55814421a0fc11299c0c1100"}, - {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1dd58a0d84a424924a35b3772419f8cd78a01c626be3147e4934d7a035a8ad68"}, - {file = "duckdb-0.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11a1194a582c80dfb57565daa06141727e415ff5d17e022dc5f31888a5423d33"}, - {file = "duckdb-0.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:be45d08541002a9338e568dca67ab4f20c0277f8f58a73dfc1435c5b4297c996"}, - {file = "duckdb-0.9.2-cp38-cp38-win32.whl", hash = "sha256:dd6f88aeb7fc0bfecaca633629ff5c986ac966fe3b7dcec0b2c48632fd550ba2"}, - {file = "duckdb-0.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:28100c4a6a04e69aa0f4a6670a6d3d67a65f0337246a0c1a429f3f28f3c40b9a"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ae5bf0b6ad4278e46e933e51473b86b4b932dbc54ff097610e5b482dd125552"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:e5d0bb845a80aa48ed1fd1d2d285dd352e96dc97f8efced2a7429437ccd1fe1f"}, - {file = "duckdb-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ce262d74a52500d10888110dfd6715989926ec936918c232dcbaddb78fc55b4"}, - {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6935240da090a7f7d2666f6d0a5e45ff85715244171ca4e6576060a7f4a1200e"}, - {file = "duckdb-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5cfb93e73911696a98b9479299d19cfbc21dd05bb7ab11a923a903f86b4d06e"}, - {file = "duckdb-0.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:64e3bc01751f31e7572d2716c3e8da8fe785f1cdc5be329100818d223002213f"}, - {file = "duckdb-0.9.2-cp39-cp39-win32.whl", hash = "sha256:6e5b80f46487636368e31b61461940e3999986359a78660a50dfdd17dd72017c"}, - {file = "duckdb-0.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:e6142a220180dbeea4f341708bd5f9501c5c962ce7ef47c1cadf5e8810b4cb13"}, - {file = "duckdb-0.9.2.tar.gz", hash = "sha256:3843afeab7c3fc4a4c0b53686a4cc1d9cdbdadcbb468d60fef910355ecafd447"}, + {file = "duckdb-0.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd25cc8d001c09a19340739ba59d33e12a81ab285b7a6bed37169655e1cefb31"}, + {file = "duckdb-0.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f9259c637b917ca0f4c63887e8d9b35ec248f5d987c886dfc4229d66a791009"}, + {file = "duckdb-0.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b48f5f1542f1e4b184e6b4fc188f497be8b9c48127867e7d9a5f4a3e334f88b0"}, + {file = "duckdb-0.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e327f7a3951ea154bb56e3fef7da889e790bd9a67ca3c36afc1beb17d3feb6d6"}, + {file = "duckdb-0.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d8b20ed67da004b4481973f4254fd79a0e5af957d2382eac8624b5c527ec48c"}, + {file = "duckdb-0.10.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d37680b8d7be04e4709db3a66c8b3eb7ceba2a5276574903528632f2b2cc2e60"}, + {file = "duckdb-0.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d34b86d6a2a6dfe8bb757f90bfe7101a3bd9e3022bf19dbddfa4b32680d26a9"}, + {file = "duckdb-0.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:73b1cb283ca0f6576dc18183fd315b4e487a545667ffebbf50b08eb4e8cdc143"}, + {file = "duckdb-0.10.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d917dde19fcec8cadcbef1f23946e85dee626ddc133e1e3f6551f15a61a03c61"}, + {file = "duckdb-0.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46757e0cf5f44b4cb820c48a34f339a9ccf83b43d525d44947273a585a4ed822"}, + {file = "duckdb-0.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:338c14d8ac53ac4aa9ec03b6f1325ecfe609ceeb72565124d489cb07f8a1e4eb"}, + {file = "duckdb-0.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:651fcb429602b79a3cf76b662a39e93e9c3e6650f7018258f4af344c816dab72"}, + {file = "duckdb-0.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3ae3c73b98b6215dab93cc9bc936b94aed55b53c34ba01dec863c5cab9f8e25"}, + {file = "duckdb-0.10.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56429b2cfe70e367fb818c2be19f59ce2f6b080c8382c4d10b4f90ba81f774e9"}, + {file = "duckdb-0.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b46c02c2e39e3676b1bb0dc7720b8aa953734de4fd1b762e6d7375fbeb1b63af"}, + {file = "duckdb-0.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:bcd460feef56575af2c2443d7394d405a164c409e9794a4d94cb5fdaa24a0ba4"}, + {file = 
"duckdb-0.10.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e229a7c6361afbb0d0ab29b1b398c10921263c52957aefe3ace99b0426fdb91e"}, + {file = "duckdb-0.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:732b1d3b6b17bf2f32ea696b9afc9e033493c5a3b783c292ca4b0ee7cc7b0e66"}, + {file = "duckdb-0.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5380d4db11fec5021389fb85d614680dc12757ef7c5881262742250e0b58c75"}, + {file = "duckdb-0.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:468a4e0c0b13c55f84972b1110060d1b0f854ffeb5900a178a775259ec1562db"}, + {file = "duckdb-0.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa1e7ff8d18d71defa84e79f5c86aa25d3be80d7cb7bc259a322de6d7cc72da"}, + {file = "duckdb-0.10.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ed1063ed97c02e9cf2e7fd1d280de2d1e243d72268330f45344c69c7ce438a01"}, + {file = "duckdb-0.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:22f2aad5bb49c007f3bfcd3e81fdedbc16a2ae41f2915fc278724ca494128b0c"}, + {file = "duckdb-0.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:8f9e2bb00a048eb70b73a494bdc868ce7549b342f7ffec88192a78e5a4e164bd"}, + {file = "duckdb-0.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6c2fc49875b4b54e882d68703083ca6f84b27536d57d623fc872e2f502b1078"}, + {file = "duckdb-0.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a66c125d0c30af210f7ee599e7821c3d1a7e09208196dafbf997d4e0cfcb81ab"}, + {file = "duckdb-0.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d99dd7a1d901149c7a276440d6e737b2777e17d2046f5efb0c06ad3b8cb066a6"}, + {file = "duckdb-0.10.3-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ec3bbdb209e6095d202202893763e26c17c88293b88ef986b619e6c8b6715bd"}, + {file = "duckdb-0.10.3-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:2b3dec4ef8ed355d7b7230b40950b30d0def2c387a2e8cd7efc80b9d14134ecf"}, + {file = "duckdb-0.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:04129f94fb49bba5eea22f941f0fb30337f069a04993048b59e2811f52d564bc"}, + {file = "duckdb-0.10.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d75d67024fc22c8edfd47747c8550fb3c34fb1cbcbfd567e94939ffd9c9e3ca7"}, + {file = "duckdb-0.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f3796e9507c02d0ddbba2e84c994fae131da567ce3d9cbb4cbcd32fadc5fbb26"}, + {file = "duckdb-0.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:78e539d85ebd84e3e87ec44d28ad912ca4ca444fe705794e0de9be3dd5550c11"}, + {file = "duckdb-0.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a99b67ac674b4de32073e9bc604b9c2273d399325181ff50b436c6da17bf00a"}, + {file = "duckdb-0.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1209a354a763758c4017a1f6a9f9b154a83bed4458287af9f71d84664ddb86b6"}, + {file = "duckdb-0.10.3-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3b735cea64aab39b67c136ab3a571dbf834067f8472ba2f8bf0341bc91bea820"}, + {file = "duckdb-0.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:816ffb9f758ed98eb02199d9321d592d7a32a6cb6aa31930f4337eb22cfc64e2"}, + {file = "duckdb-0.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:1631184b94c3dc38b13bce4045bf3ae7e1b0ecbfbb8771eb8d751d8ffe1b59b3"}, + {file = "duckdb-0.10.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb98c35fc8dd65043bc08a2414dd9f59c680d7e8656295b8969f3f2061f26c52"}, + {file = 
"duckdb-0.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e75c9f5b6a92b2a6816605c001d30790f6d67ce627a2b848d4d6040686efdf9"}, + {file = "duckdb-0.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae786eddf1c2fd003466e13393b9348a44b6061af6fe7bcb380a64cac24e7df7"}, + {file = "duckdb-0.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9387da7b7973707b0dea2588749660dd5dd724273222680e985a2dd36787668"}, + {file = "duckdb-0.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:538f943bf9fa8a3a7c4fafa05f21a69539d2c8a68e557233cbe9d989ae232899"}, + {file = "duckdb-0.10.3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6930608f35025a73eb94252964f9f19dd68cf2aaa471da3982cf6694866cfa63"}, + {file = "duckdb-0.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:03bc54a9cde5490918aad82d7d2a34290e3dfb78d5b889c6626625c0f141272a"}, + {file = "duckdb-0.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:372b6e3901d85108cafe5df03c872dfb6f0dbff66165a0cf46c47246c1957aa0"}, + {file = "duckdb-0.10.3.tar.gz", hash = "sha256:c5bd84a92bc708d3a6adffe1f554b94c6e76c795826daaaf482afc3d9c636971"}, ] [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -507,13 +579,13 @@ six = "*" [[package]] name = "jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." 
optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -625,38 +697,38 @@ files = [ [[package]] name = "mypy" -version = "1.9.0" +version = "1.10.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"}, - {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"}, - {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"}, - {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"}, - {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"}, - {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"}, - {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"}, - {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"}, - {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"}, - {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"}, - {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"}, - {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"}, - {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"}, - {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"}, - {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"}, - {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"}, - {file = 
"mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"}, - {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"}, - {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"}, - {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"}, - {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"}, - {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"}, - {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, + {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, + {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, + {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, + {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, + {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, + {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, + 
{file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, + {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, + {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, + {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, + {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, + {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, ] [package.dependencies] @@ -843,28 +915,29 @@ test = ["time-machine (>=2.6.0)"] [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -1060,7 +1133,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1068,16 +1140,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = 
"PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1094,7 +1158,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1102,7 +1165,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = 
"sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1110,13 +1172,13 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.3" description = "Python HTTP for Humans." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] @@ -1187,19 +1249,18 @@ files = [ [[package]] name = "setuptools" -version = "69.5.1" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1225,13 +1286,13 @@ files = [ 
[[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, + {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, ] [[package]] @@ -1371,20 +1432,20 @@ files = [ [[package]] name = "zipp" -version = "3.18.1" +version = "3.19.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, - {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, + {file = "zipp-3.19.0-py3-none-any.whl", hash = "sha256:96dc6ad62f1441bcaccef23b274ec471518daf4fbbc580341204936a5a3dddec"}, + {file = "zipp-3.19.0.tar.gz", hash = "sha256:952df858fb3164426c976d9338d3961e8e8b3758e2e059e0f754b8c4262625ee"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] [metadata] lock-version = "2.0" python-versions = ">=3.8" -content-hash = "6555ea415b042916951b557317c02dd5057ca96fe76176bd6e6b45ff8fda5a3b" +content-hash = "1448d533e55aeaaba576b111c60ffce57c417fafd367402947d0f14b4ee685de" diff --git a/airbyte-integrations/connectors/destination-duckdb/pyproject.toml b/airbyte-integrations/connectors/destination-duckdb/pyproject.toml index 0a7387667f43..ca109ea761ee 100644 --- a/airbyte-integrations/connectors/destination-duckdb/pyproject.toml +++ b/airbyte-integrations/connectors/destination-duckdb/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "destination-duckdb" -version = "0.3.6" +version = "0.4.0" description = "Destination implementation for Duckdb." 
authors = ["Simon Späti, Airbyte"] license = "MIT" @@ -9,7 +9,7 @@ readme = "README.md" [tool.poetry.dependencies] python = ">=3.8" airbyte-cdk = "^0.51.6" -duckdb = "0.9.2" +duckdb = "0.10.3" pyarrow = "15.0.2" [tool.poetry.group.dev.dependencies] @@ -18,10 +18,25 @@ ruff = "^0.0.286" black = "^23.7.0" mypy = "^1.5.1" faker = "24.4.0" +coverage = "^7.5.3" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" +[tool.poe.tasks] +test = { shell = "pytest" } + +coverage = { shell = "coverage run -m pytest && coverage report" } +coverage-report = { shell = "coverage report" } +coverage-html = { shell = "coverage html -d htmlcov && open htmlcov/index.html" } +coverage-reset = { shell = "coverage erase" } + +check = { shell = "ruff check . && mypy ." } + +fix = { shell = "ruff format . && ruff check --fix -s || ruff format ." } +fix-unsafe = { shell = "ruff format . && ruff check --fix --unsafe-fixes . && ruff format ." } +fix-and-check = { shell = "poe fix && poe check" } + [tool.poetry.scripts] destination-duckdb = "destination_duckdb.run:run" diff --git a/airbyte-integrations/connectors/destination-duckdb/unit_tests/destination_unit_tests.py b/airbyte-integrations/connectors/destination-duckdb/unit_tests/destination_unit_tests.py new file mode 100644 index 000000000000..4f5aeefa09b7 --- /dev/null +++ b/airbyte-integrations/connectors/destination-duckdb/unit_tests/destination_unit_tests.py @@ -0,0 +1,51 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. +from __future__ import annotations + +import os +import tempfile +from unittest.mock import Mock, patch +import pytest +from destination_duckdb.destination import CONFIG_DEFAULT_SCHEMA, DestinationDuckdb, validated_sql_name + +from airbyte_cdk.models import AirbyteMessage, ConfiguredAirbyteCatalog, Status, Type + + +def test_validated_sql_name() -> None: + assert validated_sql_name("valid_name") == "valid_name" + with pytest.raises(ValueError): + validated_sql_name("invalid-name") + +@patch("duckdb.connect") +@patch("os.makedirs") +def test_check(mock_connect, mock_makedirs) -> None: + mock_connect.return_value.execute.return_value = True + logger = Mock() + temp_dir = tempfile.mkdtemp() + config = {"destination_path": "/local/test"} + # config = {"destination_path": f"{temp_dir}/testdb.db"} + destination = DestinationDuckdb() + result = destination.check(logger, config) + assert result.status == Status.SUCCEEDED + +@patch("duckdb.connect") +@patch("os.makedirs") +def test_check_failure(mock_connect, mock_makedirs) -> None: + mock_connect.side_effect = Exception("Test exception") + logger = Mock() + config = {"destination_path": "/local/test"} + destination = DestinationDuckdb() + result = destination.check(logger, config) + assert result.status == Status.FAILED + assert "Test exception" in result.message + +@patch("duckdb.connect") +@patch("os.makedirs") +def test_write(mock_connect, mock_makedirs) -> None: + mock_connect.return_value.execute.return_value = True + config = {"destination_path": "/local/test", "schema": CONFIG_DEFAULT_SCHEMA} + catalog = ConfiguredAirbyteCatalog(streams=[]) + messages = [AirbyteMessage(type=Type.STATE, record=None)] + destination = DestinationDuckdb() + result = list(destination.write(config, catalog, messages)) + assert len(result) == 1 + assert result[0].type == Type.STATE diff --git a/airbyte-integrations/connectors/destination-duckdb/unit_tests/unit_test.py b/airbyte-integrations/connectors/destination-duckdb/unit_tests/unit_test.py index f7dcb2c361cc..393e99094619 
100644 --- a/airbyte-integrations/connectors/destination-duckdb/unit_tests/unit_test.py +++ b/airbyte-integrations/connectors/destination-duckdb/unit_tests/unit_test.py @@ -1,6 +1,4 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. import pytest from destination_duckdb.destination import DestinationDuckdb, validated_sql_name diff --git a/airbyte-integrations/connectors/destination-snowflake/build.gradle b/airbyte-integrations/connectors/destination-snowflake/build.gradle index 61f54b0647cd..10d31f47c343 100644 --- a/airbyte-integrations/connectors/destination-snowflake/build.gradle +++ b/airbyte-integrations/connectors/destination-snowflake/build.gradle @@ -3,7 +3,7 @@ plugins { } airbyteJavaConnector { - cdkVersionRequired = '0.35.13' + cdkVersionRequired = '0.35.14' features = ['db-destinations', 's3-destinations', 'typing-deduping'] useLocalCdk = false } @@ -15,12 +15,6 @@ java { } } -compileKotlin { - compilerOptions { - allWarningsAsErrors = false - } -} - application { mainClass = 'io.airbyte.integrations.destination.snowflake.SnowflakeDestinationKt' // enable when profiling diff --git a/airbyte-integrations/connectors/destination-snowflake/metadata.yaml b/airbyte-integrations/connectors/destination-snowflake/metadata.yaml index 6f703dd54e1e..06fbb8a5f156 100644 --- a/airbyte-integrations/connectors/destination-snowflake/metadata.yaml +++ b/airbyte-integrations/connectors/destination-snowflake/metadata.yaml @@ -5,7 +5,7 @@ data: connectorSubtype: database connectorType: destination definitionId: 424892c4-daac-4491-b35d-c6688ba547ba - dockerImageTag: 3.8.4 + dockerImageTag: 3.9.0 dockerRepository: airbyte/destination-snowflake documentationUrl: https://docs.airbyte.com/integrations/destinations/snowflake githubIssueLabel: destination-snowflake diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDatabaseUtils.kt similarity index 79% rename from airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.kt rename to airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDatabaseUtils.kt index ed0e517b7b68..cb8ce1f2ffd2 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDatabase.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDatabaseUtils.kt @@ -8,7 +8,9 @@ import com.zaxxer.hikari.HikariDataSource import io.airbyte.cdk.db.jdbc.DefaultJdbcDatabase import io.airbyte.cdk.db.jdbc.JdbcDatabase import io.airbyte.cdk.db.jdbc.JdbcUtils +import io.airbyte.commons.exceptions.ConfigErrorException import io.airbyte.commons.json.Jsons.deserialize +import io.airbyte.integrations.base.destination.typing_deduping.AirbyteProtocolType import java.io.IOException import java.io.PrintWriter import java.net.URI @@ -23,12 +25,13 @@ import java.util.* import java.util.concurrent.TimeUnit import java.util.stream.Collectors import javax.sql.DataSource +import net.snowflake.client.jdbc.SnowflakeSQLException import org.slf4j.Logger import org.slf4j.LoggerFactory /** SnowflakeDatabase contains helpers to 
create connections to and run queries on Snowflake. */ -object SnowflakeDatabase { - private val LOGGER: Logger = LoggerFactory.getLogger(SnowflakeDatabase::class.java) +object SnowflakeDatabaseUtils { + private val LOGGER: Logger = LoggerFactory.getLogger(SnowflakeDatabaseUtils::class.java) private const val PAUSE_BETWEEN_TOKEN_REFRESH_MIN = 7 // snowflake access token TTL is 10min and can't be modified @@ -42,14 +45,20 @@ object SnowflakeDatabase { .version(HttpClient.Version.HTTP_2) .connectTimeout(Duration.ofSeconds(10)) .build() - const val PRIVATE_KEY_FILE_NAME: String = "rsa_key.p8" - const val PRIVATE_KEY_FIELD_NAME: String = "private_key" - const val PRIVATE_KEY_PASSWORD: String = "private_key_password" + private const val PRIVATE_KEY_FILE_NAME: String = "rsa_key.p8" + private const val PRIVATE_KEY_FIELD_NAME: String = "private_key" + private const val PRIVATE_KEY_PASSWORD: String = "private_key_password" private const val CONNECTION_STRING_IDENTIFIER_KEY = "application" private const val CONNECTION_STRING_IDENTIFIER_VAL = "Airbyte_Connector" + // This is an unfortunately fragile way to capture the errors, but Snowflake doesn't + // provide a more specific permission exception error code + private const val NO_PRIVILEGES_ERROR_MESSAGE = "but current role has no privileges on it" + private const val IP_NOT_IN_WHITE_LIST_ERR_MSG = "not allowed to access Snowflake" + @JvmStatic fun createDataSource(config: JsonNode, airbyteEnvironment: String?): HikariDataSource { + val dataSource = HikariDataSource() val jdbcUrl = @@ -243,4 +252,45 @@ object SnowflakeDatabase { } } } + + fun checkForKnownConfigExceptions(e: Exception?): Optional { + if (e is SnowflakeSQLException && e.message!!.contains(NO_PRIVILEGES_ERROR_MESSAGE)) { + return Optional.of( + ConfigErrorException( + "Encountered Error with Snowflake Configuration: Current role does not have permissions on the target schema please verify your privileges", + e + ) + ) + } + if (e is SnowflakeSQLException && e.message!!.contains(IP_NOT_IN_WHITE_LIST_ERR_MSG)) { + return Optional.of( + ConfigErrorException( + """ + Snowflake has blocked access from Airbyte IP address. Please make sure that your Snowflake user account's + network policy allows access from all Airbyte IP addresses. 
See this page for the list of Airbyte IPs: + https://docs.airbyte.com/cloud/getting-started-with-airbyte-cloud#allowlist-ip-addresses and this page + for documentation on Snowflake network policies: https://docs.snowflake.com/en/user-guide/network-policies + + """.trimIndent(), + e + ) + ) + } + return Optional.empty() + } + + fun toSqlTypeName(airbyteProtocolType: AirbyteProtocolType): String { + return when (airbyteProtocolType) { + AirbyteProtocolType.STRING -> "TEXT" + AirbyteProtocolType.NUMBER -> "FLOAT" + AirbyteProtocolType.INTEGER -> "NUMBER" + AirbyteProtocolType.BOOLEAN -> "BOOLEAN" + AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE -> "TIMESTAMP_TZ" + AirbyteProtocolType.TIMESTAMP_WITHOUT_TIMEZONE -> "TIMESTAMP_NTZ" + AirbyteProtocolType.TIME_WITH_TIMEZONE -> "TEXT" + AirbyteProtocolType.TIME_WITHOUT_TIMEZONE -> "TIME" + AirbyteProtocolType.DATE -> "DATE" + AirbyteProtocolType.UNKNOWN -> "VARIANT" + } + } } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.kt index a539c804dcfd..49ea267c24e8 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestination.kt @@ -17,22 +17,34 @@ import io.airbyte.cdk.integrations.base.SerializedAirbyteMessageConsumer import io.airbyte.cdk.integrations.base.TypingAndDedupingFlag.getRawNamespaceOverride import io.airbyte.cdk.integrations.base.adaptive.AdaptiveDestinationRunner import io.airbyte.cdk.integrations.destination.NamingConventionTransformer -import io.airbyte.cdk.integrations.destination.jdbc.JdbcCheckOperations -import io.airbyte.cdk.integrations.destination.staging.StagingConsumerFactory.Companion.builder +import io.airbyte.cdk.integrations.destination.StreamSyncSummary +import io.airbyte.cdk.integrations.destination.async.AsyncStreamConsumer +import io.airbyte.cdk.integrations.destination.async.buffers.BufferManager +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage +import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteRecordMessage +import io.airbyte.cdk.integrations.destination.operation.SyncOperation +import io.airbyte.cdk.integrations.destination.s3.FileUploadFormat +import io.airbyte.cdk.integrations.destination.staging.operation.StagingStreamOperations +import io.airbyte.integrations.base.destination.operation.DefaultFlush +import io.airbyte.integrations.base.destination.operation.DefaultSyncOperation import io.airbyte.integrations.base.destination.typing_deduping.CatalogParser -import io.airbyte.integrations.base.destination.typing_deduping.DefaultTyperDeduper -import io.airbyte.integrations.base.destination.typing_deduping.NoOpTyperDeduperWithV1V2Migrations +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus +import io.airbyte.integrations.base.destination.typing_deduping.InitialRawTableStatus import io.airbyte.integrations.base.destination.typing_deduping.ParsedCatalog -import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduper +import io.airbyte.integrations.base.destination.typing_deduping.Sql +import 
io.airbyte.integrations.base.destination.typing_deduping.StreamConfig import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration +import io.airbyte.integrations.destination.snowflake.migrations.SnowflakeDV2Migration import io.airbyte.integrations.destination.snowflake.migrations.SnowflakeState +import io.airbyte.integrations.destination.snowflake.operation.SnowflakeStagingClient +import io.airbyte.integrations.destination.snowflake.operation.SnowflakeStorageOperation import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeDestinationHandler import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeSqlGenerator -import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeV1V2Migrator -import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeV2TableMigrator import io.airbyte.protocol.models.v0.AirbyteConnectionStatus import io.airbyte.protocol.models.v0.AirbyteMessage +import io.airbyte.protocol.models.v0.AirbyteRecordMessageMeta import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog +import io.airbyte.protocol.models.v0.DestinationSyncMode import java.util.* import java.util.concurrent.Executors import java.util.concurrent.ScheduledExecutorService @@ -51,38 +63,108 @@ constructor( private val airbyteEnvironment: String, private val nameTransformer: NamingConventionTransformer = SnowflakeSQLNameTransformer(), ) : BaseConnector(), Destination { + private val destinationColumns = JavaBaseConstants.DestinationColumns.V2_WITHOUT_META override fun check(config: JsonNode): AirbyteConnectionStatus? { - val snowflakeInternalStagingSqlOperations = - SnowflakeInternalStagingSqlOperations(nameTransformer) val dataSource = getDataSource(config) try { + val retentionPeriodDays = 1 + val sqlGenerator = SnowflakeSqlGenerator(retentionPeriodDays) val database = getDatabase(dataSource) - val outputSchema = nameTransformer.getIdentifier(config["schema"].asText()) - JdbcCheckOperations.attemptTableOperations( - outputSchema, - database, - nameTransformer, - snowflakeInternalStagingSqlOperations, - true, - ) - attemptStageOperations( - outputSchema, - database, - nameTransformer, - snowflakeInternalStagingSqlOperations + val databaseName = config[JdbcUtils.DATABASE_KEY].asText() + val outputSchema = nameTransformer.getIdentifier(config[JdbcUtils.SCHEMA_KEY].asText()) + val rawTableSchemaName: String = + if (getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE).isPresent) { + getRawNamespaceOverride(RAW_SCHEMA_OVERRIDE).get() + } else { + JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE + } + val finalTableName = + nameTransformer.getIdentifier( + "_airbyte_connection_test_" + + UUID.randomUUID().toString().replace("-".toRegex(), "") + ) + val snowflakeDestinationHandler = + SnowflakeDestinationHandler(databaseName, database, rawTableSchemaName) + val snowflakeStagingClient = SnowflakeStagingClient(database) + val snowflakeStorageOperation = + SnowflakeStorageOperation( + sqlGenerator = sqlGenerator, + destinationHandler = snowflakeDestinationHandler, + retentionPeriodDays, + snowflakeStagingClient + ) + val streamId = + sqlGenerator.buildStreamId(outputSchema, finalTableName, rawTableSchemaName) + val streamConfig = + StreamConfig( + id = streamId, + destinationSyncMode = DestinationSyncMode.OVERWRITE, + primaryKey = listOf(), + cursor = Optional.empty(), + columns = linkedMapOf(), + generationId = 0, + minimumGenerationId = 0, + syncId = 0 + ) + // None of the fields in destination initial status matter 
+ // for a dummy sync with type-dedupe disabled. We only look at these + // when we perform final table related setup operations. + // We just need the streamId to perform the calls in streamOperation. + val initialStatus = + DestinationInitialStatus( + streamConfig = streamConfig, + isFinalTablePresent = false, + initialRawTableStatus = + InitialRawTableStatus( + rawTableExists = false, + hasUnprocessedRecords = true, + maxProcessedTimestamp = Optional.empty() + ), + isSchemaMismatch = true, + isFinalTableEmpty = true, + destinationState = SnowflakeState(false) + ) + // We simulate a mini-sync to see the raw table code path is exercised. and disable T+D + snowflakeDestinationHandler.createNamespaces(setOf(rawTableSchemaName, outputSchema)) + + val streamOperation: StagingStreamOperations = + StagingStreamOperations( + snowflakeStorageOperation, + initialStatus, + FileUploadFormat.CSV, + destinationColumns, + disableTypeDedupe = true + ) + // Dummy message + val data = """ + {"testKey": "testValue"} + """.trimIndent() + val message = + PartialAirbyteMessage() + .withSerialized(data) + .withRecord( + PartialAirbyteRecordMessage() + .withEmittedAt(System.currentTimeMillis()) + .withMeta( + AirbyteRecordMessageMeta(), + ), + ) + streamOperation.writeRecords(streamConfig, listOf(message).stream()) + streamOperation.finalizeTable(streamConfig, StreamSyncSummary.DEFAULT) + // clean up the raw table, this is intentionally not part of actual sync code + // because we avoid dropping original tables directly. + snowflakeDestinationHandler.execute( + Sql.of( + "DROP TABLE IF EXISTS \"${streamId.rawNamespace}\".\"${streamId.rawName}\";", + ), ) return AirbyteConnectionStatus().withStatus(AirbyteConnectionStatus.Status.SUCCEEDED) } catch (e: Exception) { LOGGER.error("Exception while checking connection: ", e) return AirbyteConnectionStatus() .withStatus(AirbyteConnectionStatus.Status.FAILED) - .withMessage( - """ - Could not connect with provided configuration. - ${e.message} - """.trimIndent(), - ) + .withMessage("Could not connect with provided configuration. 
${e.message}") } finally { try { close(dataSource) @@ -93,11 +175,11 @@ constructor( } private fun getDataSource(config: JsonNode): DataSource { - return SnowflakeDatabase.createDataSource(config, airbyteEnvironment) + return SnowflakeDatabaseUtils.createDataSource(config, airbyteEnvironment) } private fun getDatabase(dataSource: DataSource): JdbcDatabase { - return SnowflakeDatabase.getDatabase(dataSource) + return SnowflakeDatabaseUtils.getDatabase(dataSource) } override fun getSerializedMessageConsumer( @@ -115,13 +197,10 @@ constructor( } val retentionPeriodDays = - SnowflakeSqlOperations.getRetentionPeriodDays( - config[SnowflakeSqlOperations.RETENTION_PERIOD_DAYS_CONFIG_KEY], + getRetentionPeriodDays( + config[RETENTION_PERIOD_DAYS], ) - val sqlGenerator = SnowflakeSqlGenerator(retentionPeriodDays) - val parsedCatalog: ParsedCatalog - val typerDeduper: TyperDeduper val database = getDatabase(getDataSource(config)) val databaseName = config[JdbcUtils.DATABASE_KEY].asText() val rawTableSchemaName: String @@ -135,63 +214,59 @@ constructor( } val snowflakeDestinationHandler = SnowflakeDestinationHandler(databaseName, database, rawTableSchemaName) - parsedCatalog = catalogParser.parseCatalog(catalog) - val migrator = SnowflakeV1V2Migrator(this.nameTransformer, database, databaseName) - val v2TableMigrator = - SnowflakeV2TableMigrator( - database, - databaseName, - sqlGenerator, - snowflakeDestinationHandler - ) + val parsedCatalog: ParsedCatalog = catalogParser.parseCatalog(catalog) val disableTypeDedupe = config.has(DISABLE_TYPE_DEDUPE) && config[DISABLE_TYPE_DEDUPE].asBoolean(false) - val migrations = listOf>() - typerDeduper = - if (disableTypeDedupe) { - NoOpTyperDeduperWithV1V2Migrations( + val migrations = + listOf>( + SnowflakeDV2Migration( + nameTransformer, + database, + databaseName, sqlGenerator, - snowflakeDestinationHandler, - parsedCatalog, - migrator, - v2TableMigrator, - migrations - ) - } else { - DefaultTyperDeduper( - sqlGenerator, - snowflakeDestinationHandler, - parsedCatalog, - migrator, - v2TableMigrator, - migrations, - ) - } + ), + ) + + val snowflakeStagingClient = SnowflakeStagingClient(database) + + val snowflakeStorageOperation = + SnowflakeStorageOperation( + sqlGenerator = sqlGenerator, + destinationHandler = snowflakeDestinationHandler, + retentionPeriodDays, + snowflakeStagingClient + ) - return builder( - outputRecordCollector, - database, - SnowflakeInternalStagingSqlOperations(nameTransformer), - nameTransformer, - config, - catalog, - true, - typerDeduper, + val syncOperation: SyncOperation = + DefaultSyncOperation( parsedCatalog, + snowflakeDestinationHandler, defaultNamespace, - JavaBaseConstants.DestinationColumns.V2_WITHOUT_META, + { initialStatus: DestinationInitialStatus, disableTD -> + StagingStreamOperations( + snowflakeStorageOperation, + initialStatus, + FileUploadFormat.CSV, + destinationColumns, + disableTD + ) + }, + migrations, + disableTypeDedupe ) - .setBufferMemoryLimit(Optional.of(snowflakeBufferMemoryLimit)) - .setOptimalBatchSizeBytes( - // The per stream size limit is following recommendations from: - // https://docs.snowflake.com/en/user-guide/data-load-considerations-prepare.html#general-file-sizing-recommendations - // "To optimize the number of parallel operations for a load, - // we recommend aiming to produce data files roughly 100-250 MB (or larger) in size - // compressed." 
- (200 * 1024 * 1024).toLong(), - ) - .build() - .createAsync() + + return AsyncStreamConsumer( + outputRecordCollector = outputRecordCollector, + onStart = {}, + onClose = { _, streamSyncSummaries -> + syncOperation.finalizeStreams(streamSyncSummaries) + SCHEDULED_EXECUTOR_SERVICE.shutdownNow() + }, + onFlush = DefaultFlush(optimalFlushBatchSize, syncOperation), + catalog = catalog, + bufferManager = BufferManager(snowflakeBufferMemoryLimit), + defaultNamespace = Optional.of(defaultNamespace), + ) } override val isV2Destination: Boolean @@ -209,55 +284,49 @@ constructor( companion object { private val LOGGER: Logger = LoggerFactory.getLogger(SnowflakeDestination::class.java) const val RAW_SCHEMA_OVERRIDE: String = "raw_data_schema" - + const val RETENTION_PERIOD_DAYS: String = "retention_period_days" const val DISABLE_TYPE_DEDUPE: String = "disable_type_dedupe" @JvmField val SCHEDULED_EXECUTOR_SERVICE: ScheduledExecutorService = Executors.newScheduledThreadPool(1) - @Throws(Exception::class) - private fun attemptStageOperations( - outputSchema: String, - database: JdbcDatabase, - namingResolver: NamingConventionTransformer, - sqlOperations: SnowflakeInternalStagingSqlOperations - ) { - // verify we have permissions to create/drop stage - - val outputTableName = - namingResolver.getIdentifier( - "_airbyte_connection_test_" + - UUID.randomUUID().toString().replace("-".toRegex(), "") - ) - val stageName = sqlOperations.getStageName(outputSchema, outputTableName) - sqlOperations.createStageIfNotExists(database, stageName) - - // try to make test write to make sure we have required role - try { - sqlOperations.attemptWriteToStage(outputSchema, stageName, database) - } finally { - // drop created tmp stage - sqlOperations.dropStageIfExists(database, stageName, null) - } + fun getRetentionPeriodDays(node: JsonNode?): Int { + val retentionPeriodDays = + if (node == null || node.isNull) { + 1 + } else { + node.asInt() + } + return retentionPeriodDays } private val snowflakeBufferMemoryLimit: Long get() = (Runtime.getRuntime().maxMemory() * 0.5).toLong() + + // The per stream size limit is following recommendations from: + // https://docs.snowflake.com/en/user-guide/data-load-considerations-prepare.html#general-file-sizing-recommendations + // "To optimize the number of parallel operations for a load, + // we recommend aiming to produce data files roughly 100-250 MB (or larger) in size + // compressed." 
+ private val optimalFlushBatchSize: Long + get() = (200 * 1024 * 1024).toLong() } } fun main(args: Array) { IntegrationRunner.addOrphanedThreadFilter { t: Thread -> - for (stackTraceElement in IntegrationRunner.getThreadCreationInfo(t).stack) { - val stackClassName = stackTraceElement.className - val stackMethodName = stackTraceElement.methodName - if ( - SFStatement::class.java.canonicalName == stackClassName && - "close" == stackMethodName || - SFSession::class.java.canonicalName == stackClassName && - "callHeartBeatWithQueryTimeout" == stackMethodName - ) { - return@addOrphanedThreadFilter false + if (IntegrationRunner.getThreadCreationInfo(t) != null) { + for (stackTraceElement in IntegrationRunner.getThreadCreationInfo(t)!!.stack) { + val stackClassName = stackTraceElement.className + val stackMethodName = stackTraceElement.methodName + if ( + SFStatement::class.java.canonicalName == stackClassName && + "close" == stackMethodName || + SFSession::class.java.canonicalName == stackClassName && + "callHeartBeatWithQueryTimeout" == stackMethodName + ) { + return@addOrphanedThreadFilter false + } } } true @@ -277,5 +346,4 @@ fun main(args: Array) { ) } .run(args) - SnowflakeDestination.SCHEDULED_EXECUTOR_SERVICE.shutdownNow() } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.kt deleted file mode 100644 index 54544a708e1d..000000000000 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.kt +++ /dev/null @@ -1,288 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ -package io.airbyte.integrations.destination.snowflake - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings -import io.airbyte.cdk.db.jdbc.JdbcDatabase -import io.airbyte.cdk.integrations.destination.NamingConventionTransformer -import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer -import io.airbyte.commons.string.Strings.join -import java.io.IOException -import java.sql.SQLException -import java.time.Instant -import java.time.ZoneOffset -import java.time.ZonedDateTime -import java.util.* -import org.slf4j.Logger -import org.slf4j.LoggerFactory - -@SuppressFBWarnings("NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE") -class SnowflakeInternalStagingSqlOperations( - private val nameTransformer: NamingConventionTransformer -) : SnowflakeSqlStagingOperations() { - override fun getStageName(namespace: String?, streamName: String?): String { - return java.lang.String.join( - ".", - '"'.toString() + nameTransformer.convertStreamName(namespace!!) + '"', - '"'.toString() + nameTransformer.convertStreamName(streamName!!) + '"' - ) - } - - override fun getStagingPath( - connectionId: UUID?, - namespace: String?, - streamName: String?, - outputTableName: String?, - writeDatetime: Instant? - ): String? 
{ - // see https://docs.snowflake.com/en/user-guide/data-load-considerations-stage.html - val zonedDateTime = ZonedDateTime.ofInstant(writeDatetime, ZoneOffset.UTC) - return nameTransformer.applyDefaultCase( - String.format( - "%s/%02d/%02d/%02d/%s/", - zonedDateTime.year, - zonedDateTime.monthValue, - zonedDateTime.dayOfMonth, - zonedDateTime.hour, - connectionId - ) - ) - } - - @Throws(IOException::class) - override fun uploadRecordsToStage( - database: JdbcDatabase?, - recordsData: SerializableBuffer?, - namespace: String?, - stageName: String?, - stagingPath: String? - ): String { - val exceptionsThrown: MutableList = ArrayList() - var succeeded = false - while (exceptionsThrown.size < UPLOAD_RETRY_LIMIT && !succeeded) { - try { - uploadRecordsToBucket(database, stageName, stagingPath, recordsData) - succeeded = true - } catch (e: Exception) { - LOGGER.error("Failed to upload records into stage {}", stagingPath, e) - exceptionsThrown.add(e) - } - if (!succeeded) { - LOGGER.info( - "Retrying to upload records into stage {} ({}/{}})", - stagingPath, - exceptionsThrown.size, - UPLOAD_RETRY_LIMIT - ) - } - } - if (!succeeded) { - throw RuntimeException( - String.format( - "Exceptions thrown while uploading records into stage: %s", - join(exceptionsThrown, "\n") - ) - ) - } - LOGGER.info( - "Successfully loaded records to stage {} with {} re-attempt(s)", - stagingPath, - exceptionsThrown.size - ) - return recordsData!!.filename - } - - @Throws(Exception::class) - private fun uploadRecordsToBucket( - database: JdbcDatabase?, - stageName: String?, - stagingPath: String?, - recordsData: SerializableBuffer? - ) { - val query = getPutQuery(stageName, stagingPath, recordsData!!.file!!.absolutePath) - LOGGER.debug("Executing query: {}", query) - database!!.execute(query) - if (!checkStageObjectExists(database, stageName, stagingPath, recordsData.filename)) { - LOGGER.error( - String.format( - "Failed to upload data into stage, object @%s not found", - (stagingPath + "/" + recordsData.filename).replace("/+".toRegex(), "/") - ) - ) - throw RuntimeException("Upload failed") - } - } - - fun getPutQuery(stageName: String?, stagingPath: String?, filePath: String?): String { - return String.format( - PUT_FILE_QUERY, - filePath, - stageName, - stagingPath, - Runtime.getRuntime().availableProcessors() - ) - } - - @Throws(SQLException::class) - private fun checkStageObjectExists( - database: JdbcDatabase?, - stageName: String?, - stagingPath: String?, - filename: String - ): Boolean { - val query = getListQuery(stageName, stagingPath, filename) - LOGGER.debug("Executing query: {}", query) - val result: Boolean - database!!.unsafeQuery(query).use { stream -> result = stream.findAny().isPresent } - return result - } - - /** - * Creates a SQL query to list all files that have been staged - * - * @param stageName name of staging folder - * @param stagingPath path to the files within the staging folder - * @param filename name of the file within staging area - * @return SQL query string - */ - fun getListQuery(stageName: String?, stagingPath: String?, filename: String?): String { - return String.format(LIST_STAGE_QUERY, stageName, stagingPath, filename) - .replace("/+".toRegex(), "/") - } - - @Throws(Exception::class) - override fun createStageIfNotExists(database: JdbcDatabase?, stageName: String?) 
{ - val query = getCreateStageQuery(stageName) - LOGGER.debug("Executing query: {}", query) - try { - database!!.execute(query) - } catch (e: Exception) { - throw checkForKnownConfigExceptions(e).orElseThrow { e } - } - } - - /** - * Creates a SQL query to create a staging folder. This query will create a staging folder if - * one previously did not exist - * - * @param stageName name of the staging folder - * @return SQL query string - */ - fun getCreateStageQuery(stageName: String?): String { - return String.format(CREATE_STAGE_QUERY, stageName) - } - - @Throws(SQLException::class) - override fun copyIntoTableFromStage( - database: JdbcDatabase?, - stageName: String?, - stagingPath: String?, - stagedFiles: List?, - tableName: String?, - schemaName: String? - ) { - try { - val query = getCopyQuery(stageName, stagingPath, stagedFiles, tableName, schemaName) - LOGGER.debug("Executing query: {}", query) - database!!.execute(query) - } catch (e: SQLException) { - throw checkForKnownConfigExceptions(e).orElseThrow { e } - } - } - - /** - * Creates a SQL query to bulk copy data into fully qualified destination table See - * https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html for more context - * - * @param stageName name of staging folder - * @param stagingPath path of staging folder to data files - * @param stagedFiles collection of the staging files - * @param dstTableName name of destination table - * @param schemaName name of schema - * @return SQL query string - */ - fun getCopyQuery( - stageName: String?, - stagingPath: String?, - stagedFiles: List?, - dstTableName: String?, - schemaName: String? - ): String { - return String.format( - COPY_QUERY_1S1T + generateFilesList(stagedFiles!!) + ";", - schemaName, - dstTableName, - stageName, - stagingPath - ) - } - - @Throws(Exception::class) - override fun dropStageIfExists( - database: JdbcDatabase?, - stageName: String?, - stagingPath: String? - ) { - try { - val query = getDropQuery(stageName) - LOGGER.debug("Executing query: {}", query) - database!!.execute(query) - } catch (e: SQLException) { - throw checkForKnownConfigExceptions(e).orElseThrow { e } - } - } - - /** - * Creates a SQL query to drop staging area and all associated files within the staged area - * - * @param stageName name of staging folder - * @return SQL query string - */ - fun getDropQuery(stageName: String?): String { - return String.format(DROP_STAGE_QUERY, stageName) - } - - /** - * Creates a SQL query used to remove staging files that were just staged See - * https://docs.snowflake.com/en/sql-reference/sql/remove.html for more context - * - * @param stageName name of staging folder - * @return SQL query string - */ - fun getRemoveQuery(stageName: String?): String { - return String.format(REMOVE_QUERY, stageName) - } - - companion object { - const val UPLOAD_RETRY_LIMIT: Int = 3 - - private const val CREATE_STAGE_QUERY = - "CREATE STAGE IF NOT EXISTS %s encryption = (type = 'SNOWFLAKE_SSE') copy_options = (on_error='skip_file');" - private const val PUT_FILE_QUERY = "PUT file://%s @%s/%s PARALLEL = %d;" - private const val LIST_STAGE_QUERY = "LIST @%s/%s/%s;" - - // the 1s1t copy query explicitly quotes the raw table+schema name. - // we set error_on_column_count_mismatch because (at time of writing), we haven't yet added - // the airbyte_meta column to the raw table. - // See also https://github.com/airbytehq/airbyte/issues/36410 for improved error handling. - // TODO remove error_on_column_count_mismatch once snowflake has airbyte_meta in raw data. 
- private val COPY_QUERY_1S1T = - """ - COPY INTO "%s"."%s" FROM '@%s/%s' - file_format = ( - type = csv - compression = auto - field_delimiter = ',' - skip_header = 0 - FIELD_OPTIONALLY_ENCLOSED_BY = '"' - NULL_IF=('') - error_on_column_count_mismatch=false - ) - """.trimIndent() - private const val DROP_STAGE_QUERY = "DROP STAGE IF EXISTS %s;" - private const val REMOVE_QUERY = "REMOVE @%s;" - - private val LOGGER: Logger = LoggerFactory.getLogger(SnowflakeSqlOperations::class.java) - } -} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperations.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperations.kt deleted file mode 100644 index fdbe3ecd6ddc..000000000000 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperations.kt +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ -package io.airbyte.integrations.destination.snowflake - -import com.fasterxml.jackson.databind.JsonNode -import io.airbyte.cdk.db.jdbc.JdbcDatabase -import io.airbyte.cdk.integrations.base.DestinationConfig.Companion.instance -import io.airbyte.cdk.integrations.base.JavaBaseConstants -import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage -import io.airbyte.cdk.integrations.destination.jdbc.JdbcSqlOperations -import io.airbyte.cdk.integrations.destination.jdbc.SqlOperations -import io.airbyte.cdk.integrations.destination.jdbc.SqlOperationsUtils.insertRawRecordsInSingleQuery -import io.airbyte.commons.exceptions.ConfigErrorException -import java.sql.SQLException -import java.util.* -import java.util.function.Consumer -import net.snowflake.client.jdbc.SnowflakeSQLException -import org.slf4j.Logger -import org.slf4j.LoggerFactory - -open class SnowflakeSqlOperations : JdbcSqlOperations(), SqlOperations { - @Throws(Exception::class) - override fun createSchemaIfNotExists(database: JdbcDatabase?, schemaName: String) { - try { - if (!schemaSet.contains(schemaName) && !isSchemaExists(database, schemaName)) { - // 1s1t is assuming a lowercase airbyte_internal schema name, so we need to quote it - database!!.execute(String.format("CREATE SCHEMA IF NOT EXISTS \"%s\";", schemaName)) - schemaSet.add(schemaName) - } - } catch (e: Exception) { - throw checkForKnownConfigExceptions(e).orElseThrow { e } - } - } - - override fun createTableQuery( - database: JdbcDatabase?, - schemaName: String?, - tableName: String? - ): String? { - val retentionPeriodDays = retentionPeriodDaysFromConfigSingleton - return String.format( - """ - CREATE TABLE IF NOT EXISTS "%s"."%s" ( - "%s" VARCHAR PRIMARY KEY, - "%s" TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp(), - "%s" TIMESTAMP WITH TIME ZONE DEFAULT NULL, - "%s" VARIANT - ) data_retention_time_in_days = %d; - """.trimIndent(), - schemaName, - tableName, - JavaBaseConstants.COLUMN_NAME_AB_RAW_ID, - JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT, - JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT, - JavaBaseConstants.COLUMN_NAME_DATA, - retentionPeriodDays - ) - } - - @Throws(Exception::class) - override fun isSchemaExists(database: JdbcDatabase?, outputSchema: String?): Boolean { - try { - database!!.unsafeQuery(SHOW_SCHEMAS).use { results -> - return results - .map { schemas: JsonNode -> schemas[NAME].asText() } - .anyMatch { anObject: String? 
-> outputSchema.equals(anObject) } - } - } catch (e: Exception) { - throw checkForKnownConfigExceptions(e).orElseThrow { e } - } - } - - override fun truncateTableQuery( - database: JdbcDatabase?, - schemaName: String?, - tableName: String? - ): String { - return String.format("TRUNCATE TABLE \"%s\".\"%s\";\n", schemaName, tableName) - } - - override fun dropTableIfExistsQuery(schemaName: String?, tableName: String?): String { - return String.format("DROP TABLE IF EXISTS \"%s\".\"%s\";\n", schemaName, tableName) - } - - @Throws(SQLException::class) - public override fun insertRecordsInternal( - database: JdbcDatabase, - records: List, - schemaName: String?, - tableName: String? - ) { - LOGGER.info("actual size of batch: {}", records.size) - // Note that the column order is weird here - that's intentional, to avoid needing to change - // SqlOperationsUtils.insertRawRecordsInSingleQuery to support a different column order. - - // snowflake query syntax: - // requires selecting from a set of values in order to invoke the parse_json function. - // INSERT INTO public.users (ab_id, data, emitted_at) SELECT column1, parse_json(column2), - // column3 - // FROM VALUES - // (?, ?, ?), - // ... - val insertQuery = - String.format( - "INSERT INTO \"%s\".\"%s\" (\"%s\", \"%s\", \"%s\") SELECT column1, parse_json(column2), column3 FROM VALUES\n", - schemaName, - tableName, - JavaBaseConstants.COLUMN_NAME_AB_RAW_ID, - JavaBaseConstants.COLUMN_NAME_DATA, - JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT - ) - val recordQuery = "(?, ?, ?),\n" - insertRawRecordsInSingleQuery(insertQuery, recordQuery, database, records) - } - - @Throws(Exception::class) - override fun insertRecordsInternalV2( - jdbcDatabase: JdbcDatabase, - list: List, - s: String?, - s1: String? - ) { - // Snowflake doesn't have standard inserts... so we don't do this at real runtime. - // Intentionally do nothing. This method is called from the `check` method. - // It probably shouldn't be, but this is the easiest path to getting this working. - } - - protected fun generateFilesList(files: List): String { - if (0 < files.size && files.size < MAX_FILES_IN_LOADING_QUERY_LIMIT) { - // see - // https://docs.snowflake.com/en/user-guide/data-load-considerations-load.html#lists-of-files - val joiner = StringJoiner(",") - files.forEach( - Consumer { filename: String -> - joiner.add("'" + filename.substring(filename.lastIndexOf("/") + 1) + "'") - } - ) - return " files = ($joiner)" - } else { - return "" - } - } - - override fun checkForKnownConfigExceptions(e: Exception?): Optional { - if (e is SnowflakeSQLException && e.message!!.contains(NO_PRIVILEGES_ERROR_MESSAGE)) { - return Optional.of( - ConfigErrorException( - "Encountered Error with Snowflake Configuration: Current role does not have permissions on the target schema please verify your privileges", - e - ) - ) - } - if (e is SnowflakeSQLException && e.message!!.contains(IP_NOT_IN_WHITE_LIST_ERR_MSG)) { - return Optional.of( - ConfigErrorException( - """ - Snowflake has blocked access from Airbyte IP address. Please make sure that your Snowflake user account's - network policy allows access from all Airbyte IP addresses. 
See this page for the list of Airbyte IPs: - https://docs.airbyte.com/cloud/getting-started-with-airbyte-cloud#allowlist-ip-addresses and this page - for documentation on Snowflake network policies: https://docs.snowflake.com/en/user-guide/network-policies - - """.trimIndent(), - e - ) - ) - } - return Optional.empty() - } - - companion object { - const val RETENTION_PERIOD_DAYS_CONFIG_KEY: String = "retention_period_days" - - private val LOGGER: Logger = LoggerFactory.getLogger(SnowflakeSqlOperations::class.java) - private const val MAX_FILES_IN_LOADING_QUERY_LIMIT = 1000 - - // This is an unfortunately fragile way to capture this, but Snowflake doesn't - // provide a more specific permission exception error code - private const val NO_PRIVILEGES_ERROR_MESSAGE = "but current role has no privileges on it" - private const val IP_NOT_IN_WHITE_LIST_ERR_MSG = "not allowed to access Snowflake" - - private val retentionPeriodDaysFromConfigSingleton: Int - /** - * Sort of hacky. The problem is that SnowflakeSqlOperations is constructed in the - * SnowflakeDestination constructor, but we don't have the JsonNode config until we try - * to call check/getSerializedConsumer on the SnowflakeDestination. So we can't actually - * inject the config normally. Instead, we just use the singleton object. :( - */ - get() = - getRetentionPeriodDays(instance!!.getNodeValue(RETENTION_PERIOD_DAYS_CONFIG_KEY)) - - fun getRetentionPeriodDays(node: JsonNode?): Int { - val retentionPeriodDays = - if (node == null || node.isNull) { - 1 - } else { - node.asInt() - } - return retentionPeriodDays - } - } -} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlStagingOperations.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlStagingOperations.kt deleted file mode 100644 index 04db1238c31c..000000000000 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlStagingOperations.kt +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ -package io.airbyte.integrations.destination.snowflake - -import io.airbyte.cdk.db.jdbc.JdbcDatabase -import io.airbyte.cdk.integrations.base.JavaBaseConstants -import io.airbyte.cdk.integrations.destination.record_buffer.FileBuffer -import io.airbyte.cdk.integrations.destination.s3.csv.CsvSerializedBuffer -import io.airbyte.cdk.integrations.destination.s3.csv.StagingDatabaseCsvSheetGenerator -import io.airbyte.cdk.integrations.destination.staging.StagingOperations -import io.airbyte.commons.json.Jsons.jsonNode -import io.airbyte.protocol.models.v0.AirbyteRecordMessage -import java.util.Map - -abstract class SnowflakeSqlStagingOperations : SnowflakeSqlOperations(), StagingOperations { - /** - * This method is used in Check connection method to make sure that user has the Write - * permission - */ - @Suppress("deprecation") - @Throws(Exception::class) - internal fun attemptWriteToStage( - outputSchema: String?, - stageName: String, - database: JdbcDatabase? 
- ) { - val csvSerializedBuffer = - CsvSerializedBuffer( - FileBuffer(CsvSerializedBuffer.CSV_GZ_SUFFIX), - StagingDatabaseCsvSheetGenerator( - JavaBaseConstants.DestinationColumns.V2_WITHOUT_META - ), - true - ) - - // create a dummy stream\records that will bed used to test uploading - csvSerializedBuffer.accept( - AirbyteRecordMessage() - .withData(jsonNode(Map.of("testKey", "testValue"))) - .withEmittedAt(System.currentTimeMillis()) - ) - csvSerializedBuffer.flush() - - uploadRecordsToStage( - database, - csvSerializedBuffer, - outputSchema, - stageName, - if (stageName.endsWith("/")) stageName else "$stageName/" - ) - } -} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/migrations/SnowflakeDV2Migration.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/migrations/SnowflakeDV2Migration.kt new file mode 100644 index 000000000000..f8bb0657a5d0 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/migrations/SnowflakeDV2Migration.kt @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.snowflake.migrations + +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.NamingConventionTransformer +import io.airbyte.integrations.base.destination.typing_deduping.DestinationHandler +import io.airbyte.integrations.base.destination.typing_deduping.DestinationInitialStatus +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig +import io.airbyte.integrations.base.destination.typing_deduping.migrators.Migration +import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeSqlGenerator +import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeV1V2Migrator +import io.github.oshai.kotlinlogging.KotlinLogging + +private val log = KotlinLogging.logger {} + +class SnowflakeDV2Migration( + namingConventionTransformer: NamingConventionTransformer, + jdbcDatabase: JdbcDatabase, + databaseName: String, + private val sqlGenerator: SnowflakeSqlGenerator +) : Migration { + private val legacyV1V2migrator = + SnowflakeV1V2Migrator(namingConventionTransformer, jdbcDatabase, databaseName) + override fun migrateIfNecessary( + destinationHandler: DestinationHandler, + stream: StreamConfig, + state: DestinationInitialStatus + ): Migration.MigrationResult { + log.info { "Initializing DV2 Migration check" } + legacyV1V2migrator.migrateIfNecessary(sqlGenerator, destinationHandler, stream) + return Migration.MigrationResult(SnowflakeState(false), true) + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStagingClient.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStagingClient.kt new file mode 100644 index 000000000000..ff8f25119b64 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStagingClient.kt @@ -0,0 +1,244 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.integrations.destination.snowflake.operation + +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import io.airbyte.commons.string.Strings.join +import io.airbyte.integrations.base.destination.typing_deduping.StreamId +import io.airbyte.integrations.destination.snowflake.SnowflakeDatabaseUtils +import io.github.oshai.kotlinlogging.KotlinLogging +import java.io.IOException +import java.sql.SQLException +import java.util.* + +private val log = KotlinLogging.logger {} + +/** Client wrapper providing Snowflake Stage related operations. */ +class SnowflakeStagingClient(private val database: JdbcDatabase) { + + // Most of the code here is preserved from + // https://github.com/airbytehq/airbyte/blob/503b819b846663b0dff4c90322d0219a93e61d14/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperations.java + @Throws(IOException::class) + fun uploadRecordsToStage( + recordsData: SerializableBuffer, + stageName: String, + stagingPath: String + ): String { + val exceptionsThrown: MutableList = ArrayList() + var succeeded = false + while (exceptionsThrown.size < UPLOAD_RETRY_LIMIT && !succeeded) { + try { + uploadRecordsToBucket(stageName, stagingPath, recordsData) + succeeded = true + } catch (e: Exception) { + log.error(e) { "Failed to upload records into stage $stagingPath" } + exceptionsThrown.add(e) + } + if (!succeeded) { + log.info { + "Retrying to upload records into stage $stagingPath (${exceptionsThrown.size}/$UPLOAD_RETRY_LIMIT})" + } + } + } + if (!succeeded) { + throw RuntimeException( + String.format( + "Exceptions thrown while uploading records into stage: %s", + join(exceptionsThrown, "\n") + ) + ) + } + log.info { + "Successfully loaded records to stage $stagingPath with ${exceptionsThrown.size} re-attempt(s)" + } + return recordsData.filename + } + + @Throws(Exception::class) + private fun uploadRecordsToBucket( + stageName: String, + stagingPath: String, + recordsData: SerializableBuffer + ) { + val query = getPutQuery(stageName, stagingPath, recordsData.file!!.absolutePath) + log.info { "Executing query: $query" } + database.execute(query) + if (!checkStageObjectExists(stageName, stagingPath, recordsData.filename)) { + log.error { + "Failed to upload data into stage, object @${ + (stagingPath + "/" + recordsData.filename).replace( + "/+".toRegex(), + "/", + ) + } not found" + } + throw RuntimeException("Upload failed") + } + } + + internal fun getPutQuery(stageName: String, stagingPath: String, filePath: String): String { + return String.format( + PUT_FILE_QUERY, + filePath, + stageName, + stagingPath, + Runtime.getRuntime().availableProcessors() + ) + } + + @Throws(SQLException::class) + private fun checkStageObjectExists( + stageName: String, + stagingPath: String, + filename: String + ): Boolean { + val query = getListQuery(stageName, stagingPath, filename) + log.debug { "Executing query: $query" } + val result: Boolean + database.unsafeQuery(query).use { stream -> result = stream.findAny().isPresent } + return result + } + + /** + * Creates a SQL query to list file which is staged + * + * @param stageName name of staging folder + * @param stagingPath path to the files within the staging folder + * @param filename name of the file within staging area + * @return SQL query string + */ + internal fun getListQuery(stageName: String, stagingPath: String, filename: String): String { + return 
String.format(LIST_STAGE_QUERY, stageName, stagingPath, filename) + .replace("/+".toRegex(), "/") + } + + @Throws(Exception::class) + fun createStageIfNotExists(stageName: String) { + val query = getCreateStageQuery(stageName) + log.debug { "Executing query: $query" } + try { + database.execute(query) + } catch (e: Exception) { + throw SnowflakeDatabaseUtils.checkForKnownConfigExceptions(e).orElseThrow { e } + } + } + + /** + * Creates a SQL query to create a staging folder. This query will create a staging folder if + * one previously did not exist + * + * @param stageName name of the staging folder + * @return SQL query string + */ + internal fun getCreateStageQuery(stageName: String): String { + return String.format(CREATE_STAGE_QUERY, stageName) + } + + @Throws(SQLException::class) + fun copyIntoTableFromStage( + stageName: String, + stagingPath: String, + stagedFiles: List, + streamId: StreamId + ) { + try { + val query = getCopyQuery(stageName, stagingPath, stagedFiles, streamId) + log.info { "Executing query: $query" } + database.execute(query) + } catch (e: SQLException) { + throw SnowflakeDatabaseUtils.checkForKnownConfigExceptions(e).orElseThrow { e } + } + } + + /** + * Creates a SQL query to bulk copy data into fully qualified destination table See + * https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html for more context + * + * @param stageName name of staging folder + * @param stagingPath path of staging folder to data files + * @param stagedFiles collection of the staging files + * @param streamId + * @return SQL query string + */ + internal fun getCopyQuery( + stageName: String, + stagingPath: String, + stagedFiles: List, + streamId: StreamId + ): String { + return String.format( + COPY_QUERY_1S1T + generateFilesList(stagedFiles) + ";", + streamId.rawNamespace, + streamId.rawName, + stageName, + stagingPath + ) + } + + // TODO: Do we need this sketchy logic when all we use is just 1 file. + private fun generateFilesList(files: List): String { + if (0 < files.size && files.size < MAX_FILES_IN_LOADING_QUERY_LIMIT) { + // see + // https://docs.snowflake.com/en/user-guide/data-load-considerations-load.html#lists-of-files + val filesString = + files.joinToString { filename: String -> + "'${ + filename.substring( + filename.lastIndexOf("/") + 1, + ) + }'" + } + return " files = ($filesString)" + } else { + return "" + } + } + + @Throws(Exception::class) + fun dropStageIfExists(stageName: String) { + try { + val query = getDropQuery(stageName) + log.debug { "Executing query: $query" } + database.execute(query) + } catch (e: SQLException) { + throw SnowflakeDatabaseUtils.checkForKnownConfigExceptions(e).orElseThrow { e } + } + } + + /** + * Creates a SQL query to drop staging area and all associated files within the staged area + * https://docs.snowflake.com/en/sql-reference/sql/drop-stage + * @param stageName name of staging folder + * @return SQL query string + */ + internal fun getDropQuery(stageName: String?): String { + return String.format(DROP_STAGE_QUERY, stageName) + } + + companion object { + private const val UPLOAD_RETRY_LIMIT: Int = 3 + private const val MAX_FILES_IN_LOADING_QUERY_LIMIT = 1000 + private const val CREATE_STAGE_QUERY = + "CREATE STAGE IF NOT EXISTS %s encryption = (type = 'SNOWFLAKE_SSE') copy_options = (on_error='skip_file');" + private const val PUT_FILE_QUERY = "PUT file://%s @%s/%s PARALLEL = %d;" + private const val LIST_STAGE_QUERY = "LIST @%s/%s/%s;" + + // the 1s1t copy query explicitly quotes the raw table+schema name. 
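The staging client's generateFilesList helper above appends an explicit files = (...) clause to COPY INTO only when the number of staged files is positive and below the loading-query limit, listing bare filenames without their path prefix. A rough Python sketch of the same clause-building logic, assuming the constant value shown in the diff; this is an illustration, not the connector code:

MAX_FILES_IN_LOADING_QUERY_LIMIT = 1000

def files_clause(staged_files):
    # Quote only the bare filenames (the part after the last '/'), comma-separated;
    # return an empty string when the list is empty or exceeds the limit.
    if 0 < len(staged_files) < MAX_FILES_IN_LOADING_QUERY_LIMIT:
        names = ", ".join("'" + f.rsplit("/", 1)[-1] + "'" for f in staged_files)
        return " files = (" + names + ")"
    return ""

print(files_clause(["2024/05/28/13/abc/data.csv.gz"]))  # -> " files = ('data.csv.gz')"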
+ // TODO: https://github.com/airbytehq/airbyte/issues/36410 for improved error handling. + private val COPY_QUERY_1S1T = + """ + |COPY INTO "%s"."%s" FROM '@%s/%s' + |file_format = ( + | type = csv + | compression = auto + | field_delimiter = ',' + | skip_header = 0 + | FIELD_OPTIONALLY_ENCLOSED_BY = '"' + | NULL_IF=('') + |) + """.trimMargin() + private const val DROP_STAGE_QUERY = "DROP STAGE IF EXISTS %s;" + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStorageOperation.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStorageOperation.kt new file mode 100644 index 000000000000..7e5d3dac1642 --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStorageOperation.kt @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.snowflake.operation + +import io.airbyte.cdk.integrations.base.JavaBaseConstants +import io.airbyte.cdk.integrations.destination.StandardNameTransformer +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import io.airbyte.integrations.base.destination.operation.StorageOperation +import io.airbyte.integrations.base.destination.typing_deduping.Sql +import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig +import io.airbyte.integrations.base.destination.typing_deduping.StreamId +import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil +import io.airbyte.integrations.destination.snowflake.SnowflakeSQLNameTransformer +import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeDestinationHandler +import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeSqlGenerator +import io.airbyte.protocol.models.v0.DestinationSyncMode +import io.github.oshai.kotlinlogging.KotlinLogging +import java.time.Instant +import java.time.ZoneOffset +import java.time.ZonedDateTime +import java.util.* + +private val log = KotlinLogging.logger {} + +class SnowflakeStorageOperation( + private val sqlGenerator: SnowflakeSqlGenerator, + private val destinationHandler: SnowflakeDestinationHandler, + private val retentionPeriodDays: Int, + private val staging: SnowflakeStagingClient, + private val nameTransformer: StandardNameTransformer = SnowflakeSQLNameTransformer(), +) : StorageOperation { + + private val connectionId = UUID.randomUUID() + private val syncDateTime = Instant.now() + + override fun prepareStage(streamId: StreamId, destinationSyncMode: DestinationSyncMode) { + // create raw table + destinationHandler.execute(Sql.of(createTableQuery(streamId))) + if (destinationSyncMode == DestinationSyncMode.OVERWRITE) { + destinationHandler.execute(Sql.of(truncateTableQuery(streamId))) + } + // create stage + staging.createStageIfNotExists(getStageName(streamId)) + } + + internal fun createTableQuery(streamId: StreamId): String { + return """ + |CREATE TABLE IF NOT EXISTS "${streamId.rawNamespace}"."${streamId.rawName}"( + | "${JavaBaseConstants.COLUMN_NAME_AB_RAW_ID}" VARCHAR PRIMARY KEY, + | "${JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT}" TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp(), + | "${JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT}" TIMESTAMP WITH TIME ZONE DEFAULT NULL, + | 
"${JavaBaseConstants.COLUMN_NAME_DATA}" VARIANT + |) data_retention_time_in_days = $retentionPeriodDays; + """.trimMargin() + } + + internal fun truncateTableQuery(streamId: StreamId): String { + return "TRUNCATE TABLE \"${streamId.rawNamespace}\".\"${streamId.rawName}\";\n" + } + + override fun writeToStage(streamId: StreamId, data: SerializableBuffer) { + val stageName = getStageName(streamId) + val stagingPath = getStagingPath() + val stagedFileName = staging.uploadRecordsToStage(data, stageName, stagingPath) + staging.copyIntoTableFromStage(stageName, stagingPath, listOf(stagedFileName), streamId) + } + override fun cleanupStage(streamId: StreamId) { + val stageName = getStageName(streamId) + log.info { "Cleaning up stage $stageName" } + staging.dropStageIfExists(stageName) + } + + internal fun getStageName(streamId: StreamId): String { + return """ + "${nameTransformer.convertStreamName(streamId.rawNamespace)}"."${ nameTransformer.convertStreamName(streamId.rawName)}" + """.trimIndent() + } + + private fun getStagingPath(): String { + // see https://docs.snowflake.com/en/user-guide/data-load-considerations-stage.html + val zonedDateTime = ZonedDateTime.ofInstant(syncDateTime, ZoneOffset.UTC) + return nameTransformer.applyDefaultCase( + String.format( + "%s/%02d/%02d/%02d/%s/", + zonedDateTime.year, + zonedDateTime.monthValue, + zonedDateTime.dayOfMonth, + zonedDateTime.hour, + connectionId + ) + ) + } + + override fun createFinalTable(streamConfig: StreamConfig, suffix: String, replace: Boolean) { + destinationHandler.execute(sqlGenerator.createTable(streamConfig, suffix, replace)) + } + + override fun overwriteFinalTable(streamConfig: StreamConfig, tmpTableSuffix: String) { + if (tmpTableSuffix.isNotBlank()) { + log.info { + "Overwriting table ${streamConfig.id.finalTableId(SnowflakeSqlGenerator.QUOTE)} with ${ + streamConfig.id.finalTableId( + SnowflakeSqlGenerator.QUOTE, + tmpTableSuffix, + ) + }" + } + destinationHandler.execute( + sqlGenerator.overwriteFinalTable(streamConfig.id, tmpTableSuffix) + ) + } + } + + override fun softResetFinalTable(streamConfig: StreamConfig) { + TyperDeduperUtil.executeSoftReset(sqlGenerator, destinationHandler, streamConfig) + } + + override fun typeAndDedupe( + streamConfig: StreamConfig, + maxProcessedTimestamp: Optional, + finalTableSuffix: String + ) { + TyperDeduperUtil.executeTypeAndDedupe( + sqlGenerator = sqlGenerator, + destinationHandler = destinationHandler, + streamConfig, + maxProcessedTimestamp, + finalTableSuffix + ) + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandler.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandler.kt index 1d602d18cac4..e84c39be90f7 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandler.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandler.kt @@ -23,6 +23,7 @@ import io.airbyte.integrations.base.destination.typing_deduping.StreamId import io.airbyte.integrations.base.destination.typing_deduping.Struct import io.airbyte.integrations.base.destination.typing_deduping.Union import 
io.airbyte.integrations.base.destination.typing_deduping.UnsupportedOneOf +import io.airbyte.integrations.destination.snowflake.SnowflakeDatabaseUtils import io.airbyte.integrations.destination.snowflake.migrations.SnowflakeState import io.airbyte.protocol.models.v0.DestinationSyncMode import java.sql.Connection @@ -31,8 +32,6 @@ import java.sql.ResultSet import java.sql.SQLException import java.time.Instant import java.util.* -import java.util.function.Function -import java.util.function.Predicate import java.util.stream.Collectors import net.snowflake.client.jdbc.SnowflakeSQLException import org.apache.commons.text.StringSubstitutor @@ -58,23 +57,19 @@ class SnowflakeDestinationHandler( @Throws(SQLException::class) private fun getFinalTableRowCount( streamIds: List - ): java.util.LinkedHashMap> { - val tableRowCounts = java.util.LinkedHashMap>() - val paramHolder = java.lang.String.join(",", Collections.nCopies(streamIds.size, "?")) + ): LinkedHashMap> { + val tableRowCounts = LinkedHashMap>() // convert list stream to array val namespaces = streamIds.map { it.finalNamespace }.toTypedArray() val names = streamIds.map { it.finalName }.toTypedArray() val query = """ - SELECT table_schema, table_name, row_count - FROM information_schema.tables - WHERE table_catalog = ? - AND table_schema IN (%s) - AND table_name IN (%s) - - """ - .trimIndent() - .formatted(paramHolder, paramHolder) + |SELECT table_schema, table_name, row_count + |FROM information_schema.tables + |WHERE table_catalog = ? + |AND table_schema IN (${IntRange(1, streamIds.size).joinToString { "?" }}) + |AND table_name IN (${IntRange(1, streamIds.size).joinToString { "?" }}) + |""".trimMargin() val bindValues = arrayOf(databaseName) + namespaces + names val results: List = database.queryJsons(query, *bindValues) for (result in results) { @@ -82,8 +77,8 @@ class SnowflakeDestinationHandler( val tableName = result["TABLE_NAME"].asText() val rowCount = result["ROW_COUNT"].asInt() tableRowCounts - .computeIfAbsent(tableSchema) { k: String? -> java.util.LinkedHashMap() }[ - tableName] = rowCount + .computeIfAbsent(tableSchema) { _: String? -> LinkedHashMap() }[tableName] = + rowCount } return tableRowCounts } @@ -95,7 +90,11 @@ class SnowflakeDestinationHandler( ): InitialRawTableStatus { // Short-circuit for overwrite, table will be truncated anyway if (destinationSyncMode == DestinationSyncMode.OVERWRITE) { - return InitialRawTableStatus(false, false, Optional.empty()) + return InitialRawTableStatus( + rawTableExists = false, + hasUnprocessedRecords = false, + maxProcessedTimestamp = Optional.empty() + ) } val tableExists = database.executeMetadataQuery { databaseMetaData: DatabaseMetaData -> @@ -112,7 +111,11 @@ class SnowflakeDestinationHandler( } } if (!tableExists) { - return InitialRawTableStatus(false, false, Optional.empty()) + return InitialRawTableStatus( + rawTableExists = false, + hasUnprocessedRecords = false, + maxProcessedTimestamp = Optional.empty() + ) } // Snowflake timestamps have nanosecond precision, so decrement by 1ns // And use two explicit queries because COALESCE doesn't short-circuit. 
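For reference, the information_schema lookups in this handler now expand one JDBC "?" placeholder per stream id with IntRange(1, streamIds.size).joinToString { "?" } and bind the catalog, the namespaces, and the names positionally. A minimal, self-contained Kotlin sketch of that expansion follows; the schema and table names are hypothetical stand-ins for the real stream ids.

// Sketch only: demonstrates the placeholder expansion used in the queries above.
fun main() {
    // One entry per stream; namespaces and names must stay the same length and order.
    val namespaces = listOf("PUBLIC", "SALES")
    val names = listOf("USERS", "ORDERS")
    val placeholders = IntRange(1, namespaces.size).joinToString { "?" } // "?, ?"
    val query =
        """
        |SELECT table_schema, table_name, row_count
        |FROM information_schema.tables
        |WHERE table_catalog = ?
        |AND table_schema IN ($placeholders)
        |AND table_name IN ($placeholders)
        |""".trimMargin()
    // Bind values follow placeholder order: catalog first, then schemas, then table names.
    val bindValues = arrayOf("MY_DATABASE") + namespaces + names
    println(query)
    println(bindValues.joinToString())
}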
@@ -126,9 +129,9 @@ class SnowflakeDestinationHandler( .createStatement() .executeQuery( StringSubstitutor( - java.util.Map.of( + java.util.Map.of( "raw_table", - id.rawTableId(SnowflakeSqlGenerator.Companion.QUOTE) + id.rawTableId(SnowflakeSqlGenerator.QUOTE) ) ) .replace( @@ -158,9 +161,10 @@ class SnowflakeDestinationHandler( ) if (minUnloadedTimestamp.isPresent) { return InitialRawTableStatus( - true, - true, - minUnloadedTimestamp.map { text: String? -> Instant.parse(text) } + rawTableExists = true, + hasUnprocessedRecords = true, + maxProcessedTimestamp = + minUnloadedTimestamp.map { text: String? -> Instant.parse(text) } ) } @@ -181,9 +185,9 @@ class SnowflakeDestinationHandler( .createStatement() .executeQuery( StringSubstitutor( - java.util.Map.of( + java.util.Map.of( "raw_table", - id.rawTableId(SnowflakeSqlGenerator.Companion.QUOTE) + id.rawTableId(SnowflakeSqlGenerator.QUOTE) ) ) .replace( @@ -212,9 +216,9 @@ class SnowflakeDestinationHandler( .first() ) return InitialRawTableStatus( - true, - false, - maxTimestamp.map { text: String? -> Instant.parse(text) } + rawTableExists = true, + hasUnprocessedRecords = false, + maxProcessedTimestamp = maxTimestamp.map { text: String? -> Instant.parse(text) } ) } @@ -241,7 +245,9 @@ class SnowflakeDestinationHandler( } else { e.message } - throw RuntimeException(trimmedMessage, e) + throw SnowflakeDatabaseUtils.checkForKnownConfigExceptions(e).orElseThrow { + RuntimeException(trimmedMessage, e) + } } LOGGER.info( @@ -254,7 +260,7 @@ class SnowflakeDestinationHandler( } private fun getPks(stream: StreamConfig?): Set { - return if (stream!!.primaryKey != null) stream.primaryKey.map(ColumnId::name).toSet() + return if (stream?.primaryKey != null) stream.primaryKey.map { it.name }.toSet() else emptySet() } @@ -306,14 +312,11 @@ class SnowflakeDestinationHandler( .stream() .collect( { LinkedHashMap() }, - { - map: java.util.LinkedHashMap, - column: Map.Entry -> + { map: LinkedHashMap, column: Map.Entry + -> map[column.key.name] = toJdbcTypeName(column.value) }, - { - obj: java.util.LinkedHashMap, - m: java.util.LinkedHashMap? -> + { obj: LinkedHashMap, m: LinkedHashMap? -> obj.putAll(m!!) } ) @@ -324,27 +327,17 @@ class SnowflakeDestinationHandler( .stream() .filter { column: Map.Entry -> JavaBaseConstants.V2_FINAL_TABLE_METADATA_COLUMNS.stream() - .map( - Function { obj: String -> - obj.uppercase(Locale.getDefault()) - } - ) - .noneMatch( - Predicate { airbyteColumnName: String -> - airbyteColumnName == column.key - } - ) + .map { obj: String -> obj.uppercase(Locale.getDefault()) } + .noneMatch { airbyteColumnName: String -> airbyteColumnName == column.key } } .collect( { LinkedHashMap() }, { - map: java.util.LinkedHashMap, + map: LinkedHashMap, column: Map.Entry -> map[column.key] = column.value.type }, - { - obj: java.util.LinkedHashMap, - m: java.util.LinkedHashMap? -> + { obj: LinkedHashMap, m: LinkedHashMap? -> obj.putAll(m!!) 
} ) @@ -369,7 +362,7 @@ class SnowflakeDestinationHandler( val tableRowCounts = getFinalTableRowCount(streamIds) return streamConfigs .stream() - .map> { streamConfig: StreamConfig -> + .map { streamConfig: StreamConfig -> try { val namespace = streamConfig.id.finalNamespace.uppercase(Locale.getDefault()) val name = streamConfig.id.finalName.uppercase(Locale.getDefault()) @@ -406,7 +399,7 @@ class SnowflakeDestinationHandler( throw RuntimeException(e) } } - .collect(Collectors.toList>()) + .collect(Collectors.toList()) } override fun toJdbcTypeName(airbyteType: AirbyteType): String { @@ -430,22 +423,35 @@ } private fun toJdbcTypeName(airbyteProtocolType: AirbyteProtocolType): String { - return when (airbyteProtocolType) { - AirbyteProtocolType.STRING -> "TEXT" - AirbyteProtocolType.NUMBER -> "FLOAT" - AirbyteProtocolType.INTEGER -> "NUMBER" - AirbyteProtocolType.BOOLEAN -> "BOOLEAN" - AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE -> "TIMESTAMP_TZ" - AirbyteProtocolType.TIMESTAMP_WITHOUT_TIMEZONE -> "TIMESTAMP_NTZ" - AirbyteProtocolType.TIME_WITH_TIMEZONE -> "TEXT" - AirbyteProtocolType.TIME_WITHOUT_TIMEZONE -> "TIME" - AirbyteProtocolType.DATE -> "DATE" - AirbyteProtocolType.UNKNOWN -> "VARIANT" - } + return SnowflakeDatabaseUtils.toSqlTypeName(airbyteProtocolType) } override fun createNamespaces(schemas: Set<String>) { - // do nothing? + schemas.forEach { + try { + // 1s1t is assuming a lowercase airbyte_internal schema name, so we need to quote it + // we quote final schema names too (earlier existed in + // SqlGenerator#createSchema). + if (!isSchemaExists(it)) { + LOGGER.info("Schema $it does not exist, proceeding to create one") + database.execute(String.format("CREATE SCHEMA IF NOT EXISTS \"%s\";", it)) + } + } catch (e: Exception) { + throw SnowflakeDatabaseUtils.checkForKnownConfigExceptions(e).orElseThrow { e } + } + } + } + + private fun isSchemaExists(schema: String): Boolean { + try { + database.unsafeQuery(SHOW_SCHEMAS).use { results -> + return results + .map { schemas: JsonNode -> schemas[NAME].asText() } + .anyMatch { anObject: String -> schema == anObject } + } + } catch (e: Exception) { + throw SnowflakeDatabaseUtils.checkForKnownConfigExceptions(e).orElseThrow { e } + } } companion object { @@ -453,31 +459,28 @@ LoggerFactory.getLogger(SnowflakeDestinationHandler::class.java) const val EXCEPTION_COMMON_PREFIX: String = "JavaScript execution error: Uncaught Execution of multiple statements failed on statement" + const val SHOW_SCHEMAS: String = "show schemas;" + const val NAME: String = "name" @Throws(SQLException::class) fun findExistingTables( database: JdbcDatabase, databaseName: String, streamIds: List<StreamId> - ): java.util.LinkedHashMap> { - val existingTables = - java.util.LinkedHashMap>() - val paramHolder = java.lang.String.join(",", Collections.nCopies(streamIds.size, "?")) + ): LinkedHashMap<String, LinkedHashMap<String, TableDefinition>> { + val existingTables = LinkedHashMap<String, LinkedHashMap<String, TableDefinition>>() // convert list stream to array val namespaces = streamIds.map { it.finalNamespace }.toTypedArray() val names = streamIds.map { it.finalName }.toTypedArray() val query = """ - SELECT table_schema, table_name, column_name, data_type, is_nullable - FROM information_schema.columns - WHERE table_catalog = ?
- AND table_schema IN (%s) - AND table_name IN (%s) - ORDER BY table_schema, table_name, ordinal_position; - - """ - .trimIndent() - .formatted(paramHolder, paramHolder) + |SELECT table_schema, table_name, column_name, data_type, is_nullable + |FROM information_schema.columns + |WHERE table_catalog = ? + |AND table_schema IN (${IntRange(1, streamIds.size).joinToString { "?" }}) + |AND table_name IN (${IntRange(1, streamIds.size).joinToString { "?" }}) + |ORDER BY table_schema, table_name, ordinal_position; + |""".trimMargin() val bindValues = arrayOf(databaseName.uppercase(Locale.getDefault())) + namespaces + names @@ -490,9 +493,9 @@ class SnowflakeDestinationHandler( val isNullable = result["IS_NULLABLE"].asText() val tableDefinition = existingTables - .computeIfAbsent(tableSchema) { k: String? -> java.util.LinkedHashMap() } - .computeIfAbsent(tableName) { k: String? -> - TableDefinition(java.util.LinkedHashMap()) + .computeIfAbsent(tableSchema) { _: String? -> LinkedHashMap() } + .computeIfAbsent(tableName) { _: String? -> + TableDefinition(LinkedHashMap()) } tableDefinition.columns[columnName] = ColumnDefinition(columnName, dataType, 0, fromIsNullableIsoString(isNullable)) diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.kt index 2cb5f8c85810..c15ff602471b 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.kt @@ -23,6 +23,7 @@ import io.airbyte.integrations.base.destination.typing_deduping.Struct import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.SOFT_RESET_SUFFIX import io.airbyte.integrations.base.destination.typing_deduping.Union import io.airbyte.integrations.base.destination.typing_deduping.UnsupportedOneOf +import io.airbyte.integrations.destination.snowflake.SnowflakeDatabaseUtils import io.airbyte.protocol.models.v0.DestinationSyncMode import java.time.Instant import java.util.* @@ -31,7 +32,7 @@ import org.apache.commons.lang3.StringUtils import org.apache.commons.text.StringSubstitutor class SnowflakeSqlGenerator(private val retentionPeriodDays: Int) : SqlGenerator { - private val CDC_DELETED_AT_COLUMN = buildColumnId("_ab_cdc_deleted_at") + private val cdcDeletedAtColumn = buildColumnId("_ab_cdc_deleted_at") override fun buildStreamId( namespace: String, @@ -76,20 +77,8 @@ class SnowflakeSqlGenerator(private val retentionPeriodDays: Int) : SqlGenerator throw IllegalArgumentException("Unsupported AirbyteType: $type") } - fun toDialectType(airbyteProtocolType: AirbyteProtocolType): String { - // TODO verify these types against normalization - return when (airbyteProtocolType) { - AirbyteProtocolType.STRING -> "TEXT" - AirbyteProtocolType.NUMBER -> "FLOAT" - AirbyteProtocolType.INTEGER -> "NUMBER" - AirbyteProtocolType.BOOLEAN -> "BOOLEAN" - AirbyteProtocolType.TIMESTAMP_WITH_TIMEZONE -> "TIMESTAMP_TZ" - AirbyteProtocolType.TIMESTAMP_WITHOUT_TIMEZONE -> "TIMESTAMP_NTZ" - AirbyteProtocolType.TIME_WITH_TIMEZONE -> "TEXT" - AirbyteProtocolType.TIME_WITHOUT_TIMEZONE -> "TIME" - AirbyteProtocolType.DATE -> 
"DATE" - AirbyteProtocolType.UNKNOWN -> "VARIANT" - } + private fun toDialectType(airbyteProtocolType: AirbyteProtocolType): String { + return SnowflakeDatabaseUtils.toSqlTypeName(airbyteProtocolType) } override fun createSchema(schema: String): Sql { @@ -353,7 +342,7 @@ class SnowflakeSqlGenerator(private val retentionPeriodDays: Int) : SqlGenerator if (stream.destinationSyncMode == DestinationSyncMode.APPEND_DEDUP) { var cdcConditionalOrIncludeStatement = "" - if (stream.columns.containsKey(CDC_DELETED_AT_COLUMN)) { + if (stream.columns.containsKey(cdcDeletedAtColumn)) { cdcConditionalOrIncludeStatement = """ OR ( @@ -517,7 +506,7 @@ class SnowflakeSqlGenerator(private val retentionPeriodDays: Int) : SqlGenerator if (stream.destinationSyncMode != DestinationSyncMode.APPEND_DEDUP) { return "" } - if (!stream.columns.containsKey(CDC_DELETED_AT_COLUMN)) { + if (!stream.columns.containsKey(cdcDeletedAtColumn)) { return "" } @@ -699,13 +688,13 @@ class SnowflakeSqlGenerator(private val retentionPeriodDays: Int) : SqlGenerator * This method is separate from [.escapeJsonIdentifier] because we need to retain the * original field name for JSON access, e.g. `SELECT "_airbyte_data":"${FOO" AS "__FOO"`. */ - fun escapeSqlIdentifier(identifier: String): String { + fun escapeSqlIdentifier(inputIdentifier: String): String { // Snowflake scripting language does something weird when the `${` bigram shows up in // the script // so replace these with something else. // For completeness, if we trigger this, also replace closing curly braces with // underscores. - var identifier = identifier + var identifier = inputIdentifier if (identifier.contains("\${")) { identifier = identifier.replace("$", "_").replace("{", "_").replace("}", "_") } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeV1V2Migrator.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeV1V2Migrator.kt index a7082cc80da8..258268e970ff 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeV1V2Migrator.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeV1V2Migrator.kt @@ -26,19 +26,19 @@ class SnowflakeV1V2Migrator( @SneakyThrows @Throws(Exception::class) override fun doesAirbyteInternalNamespaceExist(streamConfig: StreamConfig?): Boolean { - return !database + return database .queryJsons( """ - SELECT SCHEMA_NAME - FROM information_schema.schemata - WHERE schema_name = ? - AND catalog_name = ?; - - """.trimIndent(), + SELECT SCHEMA_NAME + FROM information_schema.schemata + WHERE schema_name = ? 
+ AND catalog_name = ?; + + """.trimIndent(), streamConfig!!.id.rawNamespace, databaseName ) - .isEmpty() + .isNotEmpty() } override fun schemaMatchesExpectation( diff --git a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeV2TableMigrator.kt b/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeV2TableMigrator.kt deleted file mode 100644 index cf585b8ef25e..000000000000 --- a/airbyte-integrations/connectors/destination-snowflake/src/main/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeV2TableMigrator.kt +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ -package io.airbyte.integrations.destination.snowflake.typing_deduping - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings -import io.airbyte.cdk.db.jdbc.JdbcDatabase -import io.airbyte.cdk.integrations.base.JavaBaseConstants -import io.airbyte.cdk.integrations.base.TypingAndDedupingFlag.getRawNamespaceOverride -import io.airbyte.cdk.integrations.destination.jdbc.TableDefinition -import io.airbyte.integrations.base.destination.typing_deduping.StreamConfig -import io.airbyte.integrations.base.destination.typing_deduping.StreamId -import io.airbyte.integrations.base.destination.typing_deduping.StreamId.Companion.concatenateRawTableName -import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.executeSoftReset -import io.airbyte.integrations.base.destination.typing_deduping.V2TableMigrator -import io.airbyte.integrations.destination.snowflake.SnowflakeDestination -import io.airbyte.protocol.models.v0.DestinationSyncMode -import java.sql.SQLException -import java.util.* -import java.util.List -import kotlin.collections.LinkedHashMap -import org.slf4j.Logger -import org.slf4j.LoggerFactory - -class SnowflakeV2TableMigrator( - private val database: JdbcDatabase, - private val databaseName: String, - private val generator: SnowflakeSqlGenerator, - private val handler: SnowflakeDestinationHandler -) : V2TableMigrator { - private val rawNamespace: String = - getRawNamespaceOverride(SnowflakeDestination.RAW_SCHEMA_OVERRIDE) - .orElse(JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE) - - @SuppressFBWarnings("NP_PARAMETER_MUST_BE_NONNULL_BUT_MARKED_AS_NULLABLE") - @Throws(Exception::class) - override fun migrateIfNecessary(streamConfig: StreamConfig?) { - val caseSensitiveStreamId = - buildStreamId_caseSensitive( - streamConfig!!.id.originalNamespace, - streamConfig.id.originalName, - rawNamespace - ) - val syncModeRequiresMigration = - streamConfig.destinationSyncMode != DestinationSyncMode.OVERWRITE - val existingTableCaseSensitiveExists = findExistingTable(caseSensitiveStreamId).isPresent - val existingTableUppercaseDoesNotExist = findExistingTable(streamConfig.id).isEmpty - LOGGER.info( - "Checking whether upcasing migration is necessary for {}.{}. 
Sync mode requires migration: {}; existing case-sensitive table exists: {}; existing uppercased table does not exist: {}", - streamConfig.id.originalNamespace, - streamConfig.id.originalName, - syncModeRequiresMigration, - existingTableCaseSensitiveExists, - existingTableUppercaseDoesNotExist - ) - if ( - syncModeRequiresMigration && - existingTableCaseSensitiveExists && - existingTableUppercaseDoesNotExist - ) { - LOGGER.info( - "Executing upcasing migration for {}.{}", - streamConfig.id.originalNamespace, - streamConfig.id.originalName - ) - executeSoftReset(generator, handler, streamConfig) - } - } - - @Throws(SQLException::class) - private fun findExistingTable(id: StreamId): Optional { - // The obvious database.getMetaData().getColumns() solution doesn't work, because JDBC - // translates - // VARIANT as VARCHAR - val existingTableMap: LinkedHashMap> = - SnowflakeDestinationHandler.Companion.findExistingTables( - database, - databaseName, - List.of(id) - ) - if ( - existingTableMap.containsKey(id.finalNamespace) && - existingTableMap[id.finalNamespace]!!.containsKey(id.finalName) - ) { - return Optional.of(existingTableMap[id.finalNamespace]!![id.finalName]!!) - } - return Optional.empty() - } - - companion object { - private val LOGGER: Logger = LoggerFactory.getLogger(SnowflakeV2TableMigrator::class.java) - - // These methods were copied from - // https://github.com/airbytehq/airbyte/blob/d5fdb1b982d464f54941bf9a830b9684fb47d249/airbyte-integrations/connectors/destination-snowflake/src/main/java/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGenerator.java - // which is the highest version of destination-snowflake that still uses - // quoted+case-sensitive - // identifiers - private fun buildStreamId_caseSensitive( - namespace: String, - name: String, - rawNamespaceOverride: String - ): StreamId { - // No escaping needed, as far as I can tell. We quote all our identifier names. - return StreamId( - escapeIdentifier_caseSensitive(namespace), - escapeIdentifier_caseSensitive(name), - escapeIdentifier_caseSensitive(rawNamespaceOverride), - escapeIdentifier_caseSensitive(concatenateRawTableName(namespace, name)), - namespace, - name - ) - } - - private fun escapeIdentifier_caseSensitive(identifier: String): String { - // Note that we don't need to escape backslashes here! - // The only special character in an identifier is the double-quote, which needs to be - // doubled. 
- return identifier.replace("\"", "\"\"") - } - } -} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.kt index 1ee4a151fcec..38be9827db01 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationIntegrationTest.kt @@ -44,15 +44,24 @@ internal class SnowflakeDestinationIntegrationTest { Assertions.assertEquals(AirbyteConnectionStatus.Status.FAILED, check!!.status) } + @Test + fun testCheckSuccessTest() { + val credentialsJsonString = + deserialize(Files.readString(Paths.get("secrets/1s1t_internal_staging_config.json"))) + val check = + SnowflakeDestination(OssCloudEnvVarConsts.AIRBYTE_OSS).check(credentialsJsonString) + Assertions.assertEquals(AirbyteConnectionStatus.Status.SUCCEEDED, check!!.status) + } + @Test @Throws(Exception::class) fun testInvalidSchemaName() { val config = config val schema = config["schema"].asText() val dataSource: DataSource = - SnowflakeDatabase.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS) + SnowflakeDatabaseUtils.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS) try { - val database = SnowflakeDatabase.getDatabase(dataSource) + val database = SnowflakeDatabaseUtils.getDatabase(dataSource) Assertions.assertDoesNotThrow { syncWithNamingResolver(database, schema) } Assertions.assertThrows(SQLException::class.java) { syncWithoutNamingResolver(database, schema) diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.kt index 50f2220abbe8..0e3779051446 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInsertDestinationAcceptanceTest.kt @@ -30,7 +30,6 @@ import java.sql.Connection import java.sql.ResultSet import java.sql.SQLException import java.util.* -import java.util.function.Function import java.util.stream.Collectors import javax.sql.DataSource import org.assertj.core.api.AssertionsForClassTypes @@ -50,10 +49,10 @@ open class SnowflakeInsertDestinationAcceptanceTest : DestinationAcceptanceTest( val insertConfig = deserialize(readFile(Path.of("secrets/insert_config.json"))) return insertConfig } - private var config: JsonNode = Jsons.clone(staticConfig) + private var config: JsonNode = Jsons.clone(staticConfig) private var dataSource: DataSource = - SnowflakeDatabase.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS) - private var database: JdbcDatabase = SnowflakeDatabase.getDatabase(dataSource) + SnowflakeDatabaseUtils.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS) + private var database: 
JdbcDatabase = SnowflakeDatabaseUtils.getDatabase(dataSource) @BeforeEach fun setup() { @@ -64,7 +63,7 @@ open class SnowflakeInsertDestinationAcceptanceTest : DestinationAcceptanceTest( get() = "airbyte/destination-snowflake:dev" override fun getConfig(): JsonNode { - return config!! + return config } override fun getTestDataComparator(): TestDataComparator { @@ -95,7 +94,7 @@ open class SnowflakeInsertDestinationAcceptanceTest : DestinationAcceptanceTest( @Throws(Exception::class) override fun retrieveRecords( - env: TestDestinationEnv?, + testEnv: TestDestinationEnv?, streamName: String, namespace: String, streamSchema: JsonNode @@ -109,11 +108,7 @@ open class SnowflakeInsertDestinationAcceptanceTest : DestinationAcceptanceTest( ) return retrieveRecordsFromTable(streamId.rawName, streamId.rawNamespace) .stream() - .map( - Function { r: JsonNode -> - r.get(JavaBaseConstants.COLUMN_NAME_DATA) - } - ) + .map { r: JsonNode -> r.get(JavaBaseConstants.COLUMN_NAME_DATA) } .collect(Collectors.toList()) } @@ -146,7 +141,7 @@ open class SnowflakeInsertDestinationAcceptanceTest : DestinationAcceptanceTest( val timeZone = TimeZone.getTimeZone("UTC") TimeZone.setDefault(timeZone) - return database!!.bufferedResultSetQuery( + return database.bufferedResultSetQuery( { connection: Connection -> connection .createStatement() @@ -187,14 +182,15 @@ open class SnowflakeInsertDestinationAcceptanceTest : DestinationAcceptanceTest( this.config = Jsons.clone(staticConfig) (config as ObjectNode?)!!.put("schema", schemaName) - dataSource = SnowflakeDatabase.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS) - database = SnowflakeDatabase.getDatabase(dataSource) + dataSource = + SnowflakeDatabaseUtils.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS) + database = SnowflakeDatabaseUtils.getDatabase(dataSource) database.execute(createSchemaQuery) } @Throws(Exception::class) override fun tearDown(testEnv: TestDestinationEnv) { - testSchemas.add(config!!["schema"].asText()) + testSchemas.add(config["schema"].asText()) for (schema in testSchemas) { // we need to wrap namespaces in quotes, but that means we have to manually upcase them. // thanks, v1 destinations! @@ -203,7 +199,7 @@ open class SnowflakeInsertDestinationAcceptanceTest : DestinationAcceptanceTest( // but it's approximately correct and maybe works for some things. 
val mangledSchema = schema.uppercase(Locale.getDefault()) val dropSchemaQuery = String.format("DROP SCHEMA IF EXISTS \"%s\"", mangledSchema) - database!!.execute(dropSchemaQuery) + database.execute(dropSchemaQuery) } close(dataSource) diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeTestDataComparator.kt b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeTestDataComparator.kt index 89b4c0c8054a..d815064b02d7 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeTestDataComparator.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeTestDataComparator.kt @@ -49,17 +49,23 @@ class SnowflakeTestDataComparator : AdvancedTestDataComparator() { } } - override fun compareDateTimeValues(expectedValue: String, actualValue: String): Boolean { - val destinationDate = parseLocalDate(actualValue) + override fun compareDateTimeValues( + airbyteMessageValue: String, + destinationValue: String + ): Boolean { + val destinationDate = parseLocalDate(destinationValue) val expectedDate = - LocalDate.parse(expectedValue, DateTimeFormatter.ofPattern(AIRBYTE_DATETIME_FORMAT)) + LocalDate.parse( + airbyteMessageValue, + DateTimeFormatter.ofPattern(AIRBYTE_DATETIME_FORMAT) + ) return expectedDate == destinationDate } - override fun compareDateValues(expectedValue: String, actualValue: String): Boolean { - val destinationDate = parseDate(actualValue) + override fun compareDateValues(airbyteMessageValue: String, destinationValue: String): Boolean { + val destinationDate = parseDate(destinationValue) val expectedDate = - LocalDate.parse(expectedValue, DateTimeFormatter.ofPattern(AIRBYTE_DATE_FORMAT)) + LocalDate.parse(airbyteMessageValue, DateTimeFormatter.ofPattern(AIRBYTE_DATE_FORMAT)) return expectedDate == destinationDate } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.kt index 15e915181ffb..18bf1cb40fd8 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/AbstractSnowflakeTypingDedupingTest.kt @@ -10,6 +10,7 @@ import io.airbyte.cdk.db.jdbc.JdbcDatabase import io.airbyte.cdk.db.jdbc.JdbcUtils import io.airbyte.cdk.integrations.base.JavaBaseConstants import io.airbyte.commons.io.IOs.readFile +import io.airbyte.commons.json.Jsons import io.airbyte.commons.json.Jsons.deserialize import io.airbyte.integrations.base.destination.typing_deduping.BaseTypingDedupingTest import io.airbyte.integrations.base.destination.typing_deduping.SqlGenerator @@ -45,23 +46,24 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { val config = deserialize(readFile(Path.of(configPath))) (config as ObjectNode).put("schema", 
"typing_deduping_default_schema$uniqueSuffix") databaseName = config.get(JdbcUtils.DATABASE_KEY).asText() - dataSource = SnowflakeDatabase.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS) - database = SnowflakeDatabase.getDatabase(dataSource) + dataSource = + SnowflakeDatabaseUtils.createDataSource(config, OssCloudEnvVarConsts.AIRBYTE_OSS) + database = SnowflakeDatabaseUtils.getDatabase(dataSource) cleanAirbyteInternalTable(database) return config } @Throws(Exception::class) override fun dumpRawTableRecords(streamNamespace: String?, streamName: String): List { - var streamNamespace = streamNamespace - if (streamNamespace == null) { - streamNamespace = defaultSchema + var namespaceOrDefault = streamNamespace + if (namespaceOrDefault == null) { + namespaceOrDefault = defaultSchema } - val tableName: String = StreamId.concatenateRawTableName(streamNamespace, streamName) + val tableName: String = StreamId.concatenateRawTableName(namespaceOrDefault, streamName) val schema = rawSchema return SnowflakeTestUtils.dumpRawTable( database!!, // Explicitly wrap in quotes to prevent snowflake from upcasing - "\"$schema\".\"$tableName\"" + "\"$schema\".\"$tableName\"", ) } @@ -70,23 +72,23 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { streamNamespace: String?, streamName: String ): List { - var streamNamespace = streamNamespace - if (streamNamespace == null) { - streamNamespace = defaultSchema + var namespaceOrDefault = streamNamespace + if (namespaceOrDefault == null) { + namespaceOrDefault = defaultSchema } return SnowflakeTestUtils.dumpFinalTable( database!!, databaseName!!, - streamNamespace.uppercase(Locale.getDefault()), - streamName.uppercase(Locale.getDefault()) + namespaceOrDefault.uppercase(Locale.getDefault()), + streamName.uppercase(Locale.getDefault()), ) } @Throws(Exception::class) override fun teardownStreamAndNamespace(streamNamespace: String?, streamName: String) { - var streamNamespace = streamNamespace - if (streamNamespace == null) { - streamNamespace = defaultSchema + var namespaceOrDefault = streamNamespace + if (namespaceOrDefault == null) { + namespaceOrDefault = defaultSchema } database!!.execute( String.format( @@ -96,9 +98,9 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { """.trimIndent(), rawSchema, // Raw table is still lowercase. - StreamId.concatenateRawTableName(streamNamespace, streamName), - streamNamespace.uppercase(Locale.getDefault()) - ) + StreamId.concatenateRawTableName(namespaceOrDefault, streamName), + namespaceOrDefault.uppercase(Locale.getDefault()), + ), ) } @@ -117,111 +119,6 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { */ get() = JavaBaseConstants.DEFAULT_AIRBYTE_INTERNAL_NAMESPACE - /** - * Run a sync using 3.0.0 (which is the highest version that still creates v2 final tables with - * lowercased+quoted names). Then run a sync using our current version. 
- */ - @Test - @Throws(Exception::class) - open fun testFinalTableUppercasingMigration_append() { - try { - val catalog = - ConfiguredAirbyteCatalog() - .withStreams( - java.util.List.of( - ConfiguredAirbyteStream() - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.APPEND) - .withStream( - AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA) - ) - ) - ) - - // First sync - val messages1 = readMessages("dat/sync1_messages.jsonl") - runSync(catalog, messages1, "airbyte/destination-snowflake:3.0.0") - - // We no longer have the code to dump a lowercased table, so just move on directly to - // the new sync - - // Second sync - val messages2 = readMessages("dat/sync2_messages.jsonl") - - runSync(catalog, messages2) - - val expectedRawRecords2 = readRecords("dat/sync2_expectedrecords_raw_mixed_tzs.jsonl") - val expectedFinalRecords2 = - readRecords("dat/sync2_expectedrecords_fullrefresh_append_final.jsonl") - verifySyncResult( - expectedRawRecords2, - expectedFinalRecords2, - disableFinalTableComparison() - ) - } finally { - // manually drop the lowercased schema, since we no longer have the code to do it - // automatically - // (the raw table is still in lowercase "airbyte_internal"."whatever", so the - // auto-cleanup code - // handles it fine) - database!!.execute("DROP SCHEMA IF EXISTS \"$streamNamespace\" CASCADE") - } - } - - @Test - @Throws(Exception::class) - fun testFinalTableUppercasingMigration_overwrite() { - try { - val catalog = - ConfiguredAirbyteCatalog() - .withStreams( - java.util.List.of( - ConfiguredAirbyteStream() - .withSyncMode(SyncMode.FULL_REFRESH) - .withDestinationSyncMode(DestinationSyncMode.OVERWRITE) - .withStream( - AirbyteStream() - .withNamespace(streamNamespace) - .withName(streamName) - .withJsonSchema(SCHEMA) - ) - ) - ) - - // First sync - val messages1 = readMessages("dat/sync1_messages.jsonl") - runSync(catalog, messages1, "airbyte/destination-snowflake:3.0.0") - - // We no longer have the code to dump a lowercased table, so just move on directly to - // the new sync - - // Second sync - val messages2 = readMessages("dat/sync2_messages.jsonl") - - runSync(catalog, messages2) - - val expectedRawRecords2 = - readRecords("dat/sync2_expectedrecords_fullrefresh_overwrite_raw.jsonl") - val expectedFinalRecords2 = - readRecords("dat/sync2_expectedrecords_fullrefresh_overwrite_final.jsonl") - verifySyncResult( - expectedRawRecords2, - expectedFinalRecords2, - disableFinalTableComparison() - ) - } finally { - // manually drop the lowercased schema, since we no longer have the code to do it - // automatically - // (the raw table is still in lowercase "airbyte_internal"."whatever", so the - // auto-cleanup code - // handles it fine) - database!!.execute("DROP SCHEMA IF EXISTS \"$streamNamespace\" CASCADE") - } - } - @Test @Throws(Exception::class) open fun testRemovingPKNonNullIndexes() { @@ -237,17 +134,16 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { AirbyteStream() .withNamespace(streamNamespace) .withName(streamName) - .withJsonSchema(SCHEMA) - ) - ) + .withJsonSchema(SCHEMA), + ), + ), ) // First sync val messages = readMessages("dat/sync_null_pk.jsonl") - val e = - Assertions.assertThrows(TestHarnessException::class.java) { - runSync(catalog, messages, "airbyte/destination-snowflake:3.1.18") - } // this version introduced non-null PKs to the final tables + Assertions.assertThrows(TestHarnessException::class.java) { + runSync(catalog, messages, 
"airbyte/destination-snowflake:3.1.18") + } // this version introduced non-null PKs to the final tables // ideally we would assert on the logged content of the original exception within e, but // that is @@ -257,10 +153,70 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { runSync(catalog, messages) // does not throw with latest version Assertions.assertEquals( 1, - dumpFinalTableRecords(streamNamespace, streamName).toTypedArray().size + dumpFinalTableRecords(streamNamespace, streamName).toTypedArray().size, ) } + @Test + open fun testV1V2Migration() { + val catalog = + ConfiguredAirbyteCatalog() + .withStreams( + listOf( + ConfiguredAirbyteStream() + .withSyncMode(SyncMode.FULL_REFRESH) + .withDestinationSyncMode(DestinationSyncMode.APPEND) + .withSyncId(42L) + .withGenerationId(43L) + .withMinimumGenerationId(0L) + .withStream( + AirbyteStream() + .withNamespace(streamNamespace) + .withName(streamName) + .withJsonSchema(SCHEMA), + ), + ), + ) + + // First sync + val messages1 = readMessages("dat/sync1_messages.jsonl") + + runSync( + catalog, + messages1, + "airbyte/destination-snowflake:2.1.7", + ) { config: JsonNode? -> + // Defensive to avoid weird behaviors or test failures if the original config is being + // altered by + // another thread, thanks jackson for a mutable JsonNode + val copiedConfig = Jsons.clone(config!!) + if (config is ObjectNode) { + // Opt out of T+D to run old V1 sync + (copiedConfig as ObjectNode?)!!.put( + "use_1s1t_format", + false, + ) + } + copiedConfig + } + + // The record differ code is already adapted to V2 columns format, use the post V2 sync + // to verify that append mode preserved all the raw records and final records. + + // Second sync + val messages2 = readMessages("dat/sync2_messages.jsonl") + + runSync(catalog, messages2) + + val expectedRawRecords2 = + BaseTypingDedupingTest.readRecords("dat/sync2_expectedrecords_v1v2_raw.jsonl") + val expectedFinalRecords2 = + BaseTypingDedupingTest.readRecords( + "dat/sync2_expectedrecords_v1v2_fullrefresh_append_final.jsonl" + ) + verifySyncResult(expectedRawRecords2, expectedFinalRecords2, disableFinalTableComparison()) + } + @Test @Throws(Exception::class) fun testExtractedAtUtcTimezoneMigration() { @@ -277,9 +233,9 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { AirbyteStream() .withNamespace(streamNamespace) .withName(streamName) - .withJsonSchema(SCHEMA) - ) - ) + .withJsonSchema(SCHEMA), + ), + ), ) // First sync @@ -319,7 +275,7 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { "_airbyte_data", "_AIRBYTE_DATA", "_airbyte_meta", - "_AIRBYTE_META" + "_AIRBYTE_META", ) @Volatile private var cleanedAirbyteInternalTable = false @@ -330,7 +286,7 @@ abstract class AbstractSnowflakeTypingDedupingTest : BaseTypingDedupingTest() { synchronized(AbstractSnowflakeTypingDedupingTest::class.java) { if (!cleanedAirbyteInternalTable) { database!!.execute( - "DELETE FROM \"airbyte_internal\".\"_airbyte_destination_state\" WHERE \"updated_at\" < current_date() - 7" + "DELETE FROM \"airbyte_internal\".\"_airbyte_destination_state\" WHERE \"updated_at\" < current_date() - 7", ) cleanedAirbyteInternalTable = true } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest.kt 
b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest.kt index 95d19095d71b..a08a28843458 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest.kt @@ -7,8 +7,6 @@ import com.fasterxml.jackson.databind.JsonNode import com.fasterxml.jackson.databind.node.ObjectNode import io.airbyte.commons.json.Jsons.emptyObject import java.util.* -import org.junit.jupiter.api.Disabled -import org.junit.jupiter.api.Test class SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest : AbstractSnowflakeTypingDedupingTest() { @@ -35,13 +33,4 @@ class SnowflakeInternalStagingCaseInsensitiveTypingDedupingTest : } .toList() } - - @Disabled( - "This test assumes the ability to create case-sensitive tables, which is by definition not available with QUOTED_IDENTIFIERS_IGNORE_CASE=TRUE" - ) - @Test - @Throws(Exception::class) - override fun testFinalTableUppercasingMigration_append() { - super.testFinalTableUppercasingMigration_append() - } } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingLowercaseDatabaseTypingDedupingTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingLowercaseDatabaseTypingDedupingTest.kt index 441942709372..4aef040a33b7 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingLowercaseDatabaseTypingDedupingTest.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeInternalStagingLowercaseDatabaseTypingDedupingTest.kt @@ -8,6 +8,8 @@ import com.fasterxml.jackson.databind.node.ObjectNode import io.airbyte.cdk.db.jdbc.JdbcUtils import java.sql.SQLException import java.util.* +import org.junit.jupiter.api.Disabled +import org.junit.jupiter.api.Test class SnowflakeInternalStagingLowercaseDatabaseTypingDedupingTest : AbstractSnowflakeTypingDedupingTest() { @@ -28,4 +30,12 @@ class SnowflakeInternalStagingLowercaseDatabaseTypingDedupingTest : ) return config } + + @Disabled( + "upper casing the DB name was added as a fix in 2.1.7, Data in v1 lowercased DB will be lost" + ) + @Test + override fun testV1V2Migration() { + super.testV1V2Migration() + } } diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.kt index 92ddc945d3d9..5c26693ff20e 100644 --- 
a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorIntegrationTest.kt @@ -17,7 +17,7 @@ import io.airbyte.integrations.base.destination.typing_deduping.Sql import io.airbyte.integrations.base.destination.typing_deduping.StreamId import io.airbyte.integrations.base.destination.typing_deduping.TyperDeduperUtil.executeTypeAndDedupe import io.airbyte.integrations.destination.snowflake.OssCloudEnvVarConsts -import io.airbyte.integrations.destination.snowflake.SnowflakeDatabase +import io.airbyte.integrations.destination.snowflake.SnowflakeDatabaseUtils import io.airbyte.integrations.destination.snowflake.SnowflakeSourceOperations import io.airbyte.integrations.destination.snowflake.SnowflakeTestUtils import io.airbyte.integrations.destination.snowflake.SnowflakeTestUtils.dumpFinalTable @@ -44,7 +44,7 @@ class SnowflakeSqlGeneratorIntegrationTest : BaseSqlGeneratorIntegrationTest "($row)" } .collect(Collectors.joining(",")) - database!!.execute( + database.execute( StringSubstitutor( java.util.Map.of( "final_table_id", @@ -237,7 +237,7 @@ class SnowflakeSqlGeneratorIntegrationTest : BaseSqlGeneratorIntegrationTest "($row)" } .collect(Collectors.joining(",")) - database!!.execute( + database.execute( StringSubstitutor( java.util.Map.of( "raw_table_id", @@ -283,19 +283,19 @@ class SnowflakeSqlGeneratorIntegrationTest : BaseSqlGeneratorIntegrationTest { record: JsonNode -> record["kind"].asText() } .findFirst() val columns = - database!! + database .queryJsons( """ SELECT column_name, data_type, numeric_precision, numeric_scale @@ -307,15 +307,13 @@ class SnowflakeSqlGeneratorIntegrationTest : BaseSqlGeneratorIntegrationTest( - Function { record: JsonNode -> - record["COLUMN_NAME"].asText() - }, + Collectors.toMap( + { record: JsonNode -> record["COLUMN_NAME"].asText() }, Function toMap@{ record: JsonNode -> val type = record["DATA_TYPE"].asText() if (type == "NUMBER") { @@ -367,7 +365,7 @@ class SnowflakeSqlGeneratorIntegrationTest : BaseSqlGeneratorIntegrationTest if ("NULL" == v) v else StringUtils.wrap(v, "$$") } .collect(Collectors.joining(",")) } - .map { row: String? 
-> "(%s)".formatted(row) } + .map { row: String -> "($row)" } .collect(Collectors.joining(",")) val insert = StringSubstitutor( @@ -424,7 +422,7 @@ class SnowflakeSqlGeneratorIntegrationTest : BaseSqlGeneratorIntegrationTest( + return database.bufferedResultSetQuery( { connection: Connection -> connection .createStatement() @@ -577,33 +575,31 @@ class SnowflakeSqlGeneratorIntegrationTest : BaseSqlGeneratorIntegrationTest> = createTable.transactions .stream() - .map>( - Function { transaction: List -> - transaction - .stream() - .map { statement: String -> - Arrays.stream( - statement - .split(System.lineSeparator().toRegex()) - .dropLastWhile { it.isEmpty() } - .toTypedArray() + .map> { transaction: List -> + transaction + .stream() + .map { statement: String -> + Arrays.stream( + statement + .split(System.lineSeparator().toRegex()) + .dropLastWhile { it.isEmpty() } + .toTypedArray() + ) + .map { line: String -> + if ( + !line.contains("CLUSTER") && + (line.contains("id1") || + line.contains("id2") || + line.contains("ID1") || + line.contains("ID2")) ) - .map { line: String -> - if ( - !line.contains("CLUSTER") && - (line.contains("id1") || - line.contains("id2") || - line.contains("ID1") || - line.contains("ID2")) - ) - line.replace(",", " NOT NULL,") - else line - } - .collect(Collectors.joining("\r\n")) - } - .toList() - } - ) + line.replace(",", " NOT NULL,") + else line + } + .collect(Collectors.joining("\r\n")) + } + .toList() + } .toList() destinationHandler.execute(Sql(createTableModified)) initialStates = @@ -1844,15 +1840,15 @@ class SnowflakeSqlGeneratorIntegrationTest : BaseSqlGeneratorIntegrationTest?"} + +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 200, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Alice", "ADDRESS": {"city": "Seattle", "state": "WA"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:00:00.000000000Z", "NAME": "Bob", "ADDRESS": {"city": "New York", "state": "NY"}} +{"_AIRBYTE_EXTRACTED_AT": "1970-01-01T00:00:02.000000000Z", "_AIRBYTE_META":{"errors":[]}, "ID1": 1, "ID2": 201, "UPDATED_AT": "2000-01-02T00:01:00.000000000Z", "_AB_CDC_DELETED_AT": "1970-01-01T00:00:00.000000000Z"} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_v1v2_raw.jsonl b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_v1v2_raw.jsonl new file mode 100644 index 000000000000..1e98c285f68b --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test-integration/resources/dat/sync2_expectedrecords_v1v2_raw.jsonl @@ -0,0 +1,10 @@ +// We keep the records from the first sync +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "San Francisco", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-01T00:01:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Los Angeles", "state": "CA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-01T00:02:00Z", "name": "Bob", "address": {"city": "Boston", "state": "MA"}}} 
+{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 2, "id2": 200, "updated_at": "2000-01-01T00:03:00Z", "name": "Charlie", "age": 42, "registration_date": "2023-12-23"}} +{"_airbyte_extracted_at": "1970-01-01T00:00:01.000000000-08:00", "_airbyte_data": {"id1": 3, "id2": 200, "updated_at": "2000-01-01T00:04:00Z", "name": "a\bb\fc\nd\re\tf`~!@#$%^&*()_+-=[]\\{}|'\",./<>?"}} +// And append the records from the second sync +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 200, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Alice", "address": {"city": "Seattle", "state": "WA"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:00:00Z", "_ab_cdc_deleted_at": null, "name": "Bob", "address": {"city": "New York", "state": "NY"}}} +{"_airbyte_extracted_at": "1970-01-01T00:00:02.000000000Z", "_airbyte_data": {"id1": 1, "id2": 201, "updated_at": "2000-01-02T00:01:00Z", "_ab_cdc_deleted_at": "1970-01-01T00:00:00Z"}} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.kt index 216acf0cee5e..f33e6dba82bc 100644 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeDestinationTest.kt @@ -3,19 +3,12 @@ */ package io.airbyte.integrations.destination.snowflake -import com.fasterxml.jackson.databind.JsonNode import io.airbyte.cdk.integrations.base.DestinationConfig -import io.airbyte.cdk.integrations.destination.async.AsyncStreamConsumer -import io.airbyte.commons.json.Jsons.deserialize import io.airbyte.commons.json.Jsons.emptyObject -import io.airbyte.commons.resources.MoreResources.readResource -import io.airbyte.protocol.models.v0.AirbyteMessage -import io.airbyte.protocol.models.v0.ConfiguredAirbyteCatalog import java.util.regex.Pattern import java.util.stream.Stream import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.Arguments import org.junit.jupiter.params.provider.MethodSource @@ -38,18 +31,6 @@ class SnowflakeDestinationTest { Assertions.assertEquals(isMatch, matcher.find()) } - @Test - @Throws(Exception::class) - fun testWriteSnowflakeInternal() { - val config = deserialize(readResource("internal_staging_config.json"), JsonNode::class.java) - val consumer = - SnowflakeDestination(OssCloudEnvVarConsts.AIRBYTE_OSS).getSerializedMessageConsumer( - config, - ConfiguredAirbyteCatalog() - ) { _: AirbyteMessage? 
-> } - Assertions.assertEquals(AsyncStreamConsumer::class.java, consumer.javaClass) - } - companion object { @JvmStatic private fun urlsDataProvider(): Stream { diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.kt deleted file mode 100644 index fa0fcdfbb63c..000000000000 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeInternalStagingSqlOperationsTest.kt +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ -package io.airbyte.integrations.destination.snowflake - -import io.airbyte.cdk.integrations.base.DestinationConfig -import io.airbyte.commons.json.Jsons.emptyObject -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test - -internal class SnowflakeInternalStagingSqlOperationsTest { - private var snowflakeStagingSqlOperations: SnowflakeInternalStagingSqlOperations? = null - - @BeforeEach - fun setup() { - DestinationConfig.initialize(emptyObject()) - snowflakeStagingSqlOperations = - SnowflakeInternalStagingSqlOperations(SnowflakeSQLNameTransformer()) - } - - @Test - fun createStageIfNotExists() { - val actualCreateStageQuery = snowflakeStagingSqlOperations!!.getCreateStageQuery(STAGE_NAME) - val expectedCreateStageQuery = - "CREATE STAGE IF NOT EXISTS " + - STAGE_NAME + - " encryption = (type = 'SNOWFLAKE_SSE') copy_options = (on_error='skip_file');" - Assertions.assertEquals(expectedCreateStageQuery, actualCreateStageQuery) - } - - @Test - fun putFileToStage() { - val expectedQuery = - "PUT file://" + FILE_PATH + " @" + STAGE_NAME + "/" + STAGE_PATH + " PARALLEL =" - val actualPutQuery = - snowflakeStagingSqlOperations!!.getPutQuery(STAGE_NAME, STAGE_PATH, FILE_PATH) - Assertions.assertTrue(actualPutQuery.startsWith(expectedQuery)) - } - - @Test - fun listStage() { - val expectedQuery = "LIST @" + STAGE_NAME + "/" + STAGE_PATH + FILE_PATH + ";" - val actualListQuery = - snowflakeStagingSqlOperations!!.getListQuery(STAGE_NAME, STAGE_PATH, FILE_PATH) - Assertions.assertEquals(expectedQuery, actualListQuery) - } - - @Test - fun copyIntoTmpTableFromStage() { - val expectedQuery = - """ - COPY INTO "schemaName"."tableName" FROM '@stageName/stagePath/2022/' - file_format = ( - type = csv - compression = auto - field_delimiter = ',' - skip_header = 0 - FIELD_OPTIONALLY_ENCLOSED_BY = '"' - NULL_IF=('') - error_on_column_count_mismatch=false - ) files = ('filename1','filename2'); - """.trimIndent() - val actualCopyQuery = - snowflakeStagingSqlOperations!!.getCopyQuery( - STAGE_NAME, - STAGE_PATH, - listOf("filename1", "filename2"), - "tableName", - SCHEMA_NAME - ) - Assertions.assertEquals(expectedQuery, actualCopyQuery) - } - - @Test - fun dropStageIfExists() { - val expectedQuery = "DROP STAGE IF EXISTS " + STAGE_NAME + ";" - val actualDropQuery = snowflakeStagingSqlOperations!!.getDropQuery(STAGE_NAME) - Assertions.assertEquals(expectedQuery, actualDropQuery) - } - - @Test - fun removeStage() { - val expectedQuery = "REMOVE @" + STAGE_NAME + ";" - val actualRemoveQuery = snowflakeStagingSqlOperations!!.getRemoveQuery(STAGE_NAME) - Assertions.assertEquals(expectedQuery, actualRemoveQuery) - } - - companion object { - private 
const val SCHEMA_NAME = "schemaName" - private const val STAGE_NAME = "stageName" - private const val STAGE_PATH = "stagePath/2022/" - private const val FILE_PATH = "filepath/filename" - } -} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.kt deleted file mode 100644 index d3fc55c6ad81..000000000000 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsTest.kt +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ -package io.airbyte.integrations.destination.snowflake - -import edu.umd.cs.findbugs.annotations.SuppressFBWarnings -import io.airbyte.cdk.db.jdbc.JdbcDatabase -import io.airbyte.cdk.integrations.base.DestinationConfig -import io.airbyte.cdk.integrations.base.JavaBaseConstants -import io.airbyte.cdk.integrations.destination.async.model.PartialAirbyteMessage -import io.airbyte.commons.functional.CheckedConsumer -import io.airbyte.commons.json.Jsons.emptyObject -import java.sql.Connection -import java.sql.SQLException -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test -import org.mockito.ArgumentMatchers -import org.mockito.Mockito -import org.mockito.Mockito.mock -import org.mockito.kotlin.any - -internal class SnowflakeSqlOperationsTest { - private var snowflakeSqlOperations: SnowflakeSqlOperations? = null - var db: JdbcDatabase = mock() - - @BeforeEach - fun setup() { - DestinationConfig.initialize(emptyObject()) - snowflakeSqlOperations = SnowflakeSqlOperations() - } - - @Test - fun createTableQuery() { - val expectedQuery = - String.format( - """ - CREATE TABLE IF NOT EXISTS "%s"."%s" ( - "%s" VARCHAR PRIMARY KEY, - "%s" TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp(), - "%s" TIMESTAMP WITH TIME ZONE DEFAULT NULL, - "%s" VARIANT - ) data_retention_time_in_days = 1; - """.trimIndent(), - SCHEMA_NAME, - TABLE_NAME, - JavaBaseConstants.COLUMN_NAME_AB_RAW_ID, - JavaBaseConstants.COLUMN_NAME_AB_EXTRACTED_AT, - JavaBaseConstants.COLUMN_NAME_AB_LOADED_AT, - JavaBaseConstants.COLUMN_NAME_DATA - ) - val actualQuery = snowflakeSqlOperations!!.createTableQuery(db, SCHEMA_NAME, TABLE_NAME) - Assertions.assertEquals(expectedQuery, actualQuery) - } - - @Throws(Exception::class) - @Test - fun testSchemaExists() { - snowflakeSqlOperations!!.isSchemaExists(db, SCHEMA_NAME) - Mockito.verify(db, Mockito.times(1)).unsafeQuery(ArgumentMatchers.anyString()) - } - - @Test - @Throws(SQLException::class) - @SuppressFBWarnings("BC_IMPOSSIBLE_CAST") - fun insertRecordsInternal() { - snowflakeSqlOperations!!.insertRecordsInternal( - db, - listOf(PartialAirbyteMessage()), - SCHEMA_NAME, - TABLE_NAME - ) - Mockito.verify(db, Mockito.times(1)) - .execute(any>()) - } - - companion object { - var SCHEMA_NAME: String = "schemaName" - const val TABLE_NAME: String = "tableName" - } -} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsThrowConfigExceptionTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsThrowConfigExceptionTest.kt deleted file mode 100644 index 
9c2302ae1f24..000000000000 --- a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/SnowflakeSqlOperationsThrowConfigExceptionTest.kt +++ /dev/null @@ -1,208 +0,0 @@ -/* - * Copyright (c) 2023 Airbyte, Inc., all rights reserved. - */ -package io.airbyte.integrations.destination.snowflake - -import io.airbyte.cdk.db.jdbc.JdbcDatabase -import io.airbyte.cdk.integrations.base.DestinationConfig -import io.airbyte.commons.exceptions.ConfigErrorException -import io.airbyte.commons.json.Jsons.emptyObject -import java.sql.SQLException -import java.util.stream.Stream -import net.snowflake.client.jdbc.SnowflakeSQLException -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.BeforeAll -import org.junit.jupiter.api.function.Executable -import org.junit.jupiter.params.ParameterizedTest -import org.junit.jupiter.params.provider.Arguments -import org.junit.jupiter.params.provider.MethodSource -import org.mockito.Mockito - -/** - * This class contains tests to make sure we catch some Snowflake's exceptions and convert them to - * Airbyte Config Exception (Ex. User has no required permission, User's IP is not in Whitelist, - * etc) - */ -internal class SnowflakeSqlOperationsThrowConfigExceptionTest { - @ParameterizedTest - @MethodSource("testArgumentsForDbExecute") - fun testCatchNoPermissionOnExecuteException( - message: String, - shouldCapture: Boolean, - executable: Executable - ) { - try { - Mockito.doThrow(SnowflakeSQLException(message)) - .`when`(dbForExecuteQuery) - .execute(Mockito.anyString()) - } catch (e: SQLException) { - // This would not be expected, but the `execute` method above will flag as an unhandled - // exception - assert(false) - } - executeTest(message, shouldCapture, executable) - } - - @ParameterizedTest - @MethodSource("testArgumentsForDbUnsafeQuery") - fun testCatchNoPermissionOnUnsafeQueryException( - message: String, - shouldCapture: Boolean, - executable: Executable - ) { - try { - Mockito.doThrow(SnowflakeSQLException(message)) - .`when`(dbForRunUnsafeQuery) - .unsafeQuery(Mockito.anyString()) - } catch (e: SQLException) { - // This would not be expected, but the `execute` method above will flag as an unhandled - // exception - assert(false) - } - executeTest(message, shouldCapture, executable) - } - - private fun executeTest(message: String, shouldCapture: Boolean, executable: Executable) { - val exception = Assertions.assertThrows(Exception::class.java, executable) - if (shouldCapture) { - Assertions.assertInstanceOf(ConfigErrorException::class.java, exception) - } else { - Assertions.assertInstanceOf(SnowflakeSQLException::class.java, exception) - Assertions.assertEquals(exception.message, message) - } - } - - companion object { - private const val SCHEMA_NAME = "dummySchemaName" - private const val STAGE_NAME = "dummyStageName" - private const val TABLE_NAME = "dummyTableName" - private const val STAGE_PATH = "stagePath/2022/" - private val FILE_PATH = listOf("filepath/filename") - - private const val TEST_NO_CONFIG_EXCEPTION_CATCHED = "TEST" - private const val TEST_PERMISSION_EXCEPTION_CATCHED = - "but current role has no privileges on it" - private const val TEST_IP_NOT_IN_WHITE_LIST_EXCEPTION_CATCHED = - "not allowed to access Snowflake" - - private var snowflakeStagingSqlOperations: SnowflakeInternalStagingSqlOperations? = null - - private var snowflakeSqlOperations: SnowflakeSqlOperations? 
= null - - private val dbForExecuteQuery: JdbcDatabase = Mockito.mock(JdbcDatabase::class.java) - private val dbForRunUnsafeQuery: JdbcDatabase = Mockito.mock(JdbcDatabase::class.java) - - private var createStageIfNotExists: Executable? = null - private var dropStageIfExists: Executable? = null - private var copyIntoTableFromStage: Executable? = null - - private var createSchemaIfNotExists: Executable? = null - private var isSchemaExists: Executable? = null - private var createTableIfNotExists: Executable? = null - private var dropTableIfExists: Executable? = null - - @JvmStatic - @BeforeAll - fun setup(): Unit { - DestinationConfig.initialize(emptyObject()) - - snowflakeStagingSqlOperations = - SnowflakeInternalStagingSqlOperations(SnowflakeSQLNameTransformer()) - snowflakeSqlOperations = SnowflakeSqlOperations() - - createStageIfNotExists = Executable { - snowflakeStagingSqlOperations!!.createStageIfNotExists( - dbForExecuteQuery, - STAGE_NAME - ) - } - dropStageIfExists = Executable { - snowflakeStagingSqlOperations!!.dropStageIfExists( - dbForExecuteQuery, - STAGE_NAME, - null - ) - } - copyIntoTableFromStage = Executable { - snowflakeStagingSqlOperations!!.copyIntoTableFromStage( - dbForExecuteQuery, - STAGE_NAME, - STAGE_PATH, - FILE_PATH, - TABLE_NAME, - SCHEMA_NAME - ) - } - - createSchemaIfNotExists = Executable { - snowflakeSqlOperations!!.createSchemaIfNotExists(dbForExecuteQuery, SCHEMA_NAME) - } - isSchemaExists = Executable { - snowflakeSqlOperations!!.isSchemaExists(dbForRunUnsafeQuery, SCHEMA_NAME) - } - createTableIfNotExists = Executable { - snowflakeSqlOperations!!.createTableIfNotExists( - dbForExecuteQuery, - SCHEMA_NAME, - TABLE_NAME - ) - } - dropTableIfExists = Executable { - snowflakeSqlOperations!!.dropTableIfExists( - dbForExecuteQuery, - SCHEMA_NAME, - TABLE_NAME - ) - } - } - - @JvmStatic - private fun testArgumentsForDbExecute(): Stream { - return Stream.of( - Arguments.of(TEST_NO_CONFIG_EXCEPTION_CATCHED, false, createStageIfNotExists), - Arguments.of(TEST_PERMISSION_EXCEPTION_CATCHED, true, createStageIfNotExists), - Arguments.of( - TEST_IP_NOT_IN_WHITE_LIST_EXCEPTION_CATCHED, - true, - createStageIfNotExists - ), - Arguments.of(TEST_NO_CONFIG_EXCEPTION_CATCHED, false, dropStageIfExists), - Arguments.of(TEST_PERMISSION_EXCEPTION_CATCHED, true, dropStageIfExists), - Arguments.of(TEST_IP_NOT_IN_WHITE_LIST_EXCEPTION_CATCHED, true, dropStageIfExists), - Arguments.of(TEST_NO_CONFIG_EXCEPTION_CATCHED, false, copyIntoTableFromStage), - Arguments.of(TEST_PERMISSION_EXCEPTION_CATCHED, true, copyIntoTableFromStage), - Arguments.of( - TEST_IP_NOT_IN_WHITE_LIST_EXCEPTION_CATCHED, - true, - copyIntoTableFromStage - ), - Arguments.of(TEST_NO_CONFIG_EXCEPTION_CATCHED, false, createSchemaIfNotExists), - Arguments.of(TEST_PERMISSION_EXCEPTION_CATCHED, true, createSchemaIfNotExists), - Arguments.of( - TEST_IP_NOT_IN_WHITE_LIST_EXCEPTION_CATCHED, - true, - createSchemaIfNotExists - ), - Arguments.of(TEST_NO_CONFIG_EXCEPTION_CATCHED, false, createTableIfNotExists), - Arguments.of(TEST_PERMISSION_EXCEPTION_CATCHED, true, createTableIfNotExists), - Arguments.of( - TEST_IP_NOT_IN_WHITE_LIST_EXCEPTION_CATCHED, - true, - createTableIfNotExists - ), - Arguments.of(TEST_NO_CONFIG_EXCEPTION_CATCHED, false, dropTableIfExists), - Arguments.of(TEST_PERMISSION_EXCEPTION_CATCHED, true, dropTableIfExists), - Arguments.of(TEST_IP_NOT_IN_WHITE_LIST_EXCEPTION_CATCHED, true, dropTableIfExists) - ) - } - - @JvmStatic - private fun testArgumentsForDbUnsafeQuery(): Stream { - return Stream.of( - 
Arguments.of(TEST_NO_CONFIG_EXCEPTION_CATCHED, false, isSchemaExists), - Arguments.of(TEST_PERMISSION_EXCEPTION_CATCHED, true, isSchemaExists), - Arguments.of(TEST_IP_NOT_IN_WHITE_LIST_EXCEPTION_CATCHED, true, isSchemaExists) - ) - } - } -} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStagingClientTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStagingClientTest.kt new file mode 100644 index 000000000000..a608f4ff7d0c --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStagingClientTest.kt @@ -0,0 +1,328 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.snowflake.operation + +import com.fasterxml.jackson.databind.JsonNode +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.cdk.integrations.destination.record_buffer.SerializableBuffer +import io.airbyte.commons.exceptions.ConfigErrorException +import io.airbyte.commons.json.Jsons +import io.airbyte.integrations.base.destination.typing_deduping.StreamId +import java.io.File +import java.sql.SQLException +import java.util.stream.Stream +import net.snowflake.client.jdbc.SnowflakeSQLException +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Nested +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.function.Executable +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.MethodSource +import org.mockito.kotlin.any +import org.mockito.kotlin.doNothing +import org.mockito.kotlin.doThrow +import org.mockito.kotlin.inOrder +import org.mockito.kotlin.mock +import org.mockito.kotlin.reset +import org.mockito.kotlin.times +import org.mockito.kotlin.verify +import org.mockito.kotlin.verifyNoMoreInteractions +import org.mockito.kotlin.whenever + +@SuppressFBWarnings("BC_IMPOSSIBLE_CAST") +class SnowflakeStagingClientTest { + + @Nested + inner class SuccessTest { + private val database = mock() + private val stagingClient = SnowflakeStagingClient(database) + + @BeforeEach + fun setUp() { + // checkIfStage exists should be the only call to mock. it checks if any object + // exists. 
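The deleted `SnowflakeSqlOperationsThrowConfigExceptionTest` documented the behaviour it covered: Snowflake errors caused by missing grants or a non-allow-listed IP should surface as Airbyte `ConfigErrorException` rather than as raw SQL errors, and that coverage now lives in the new operation/typing-deduping tests below. The production-side translation logic is not part of this diff, so the following is only a minimal sketch of the rule those tests pin down; the helper name `translateSnowflakeErrors` and the two-argument `ConfigErrorException` constructor are assumptions.

```kotlin
import io.airbyte.commons.exceptions.ConfigErrorException
import net.snowflake.client.jdbc.SnowflakeSQLException

// Message fragments that indicate a user-fixable configuration problem rather than a
// transient Snowflake failure (the same fragments the tests assert on).
private val CONFIG_ERROR_FRAGMENTS = listOf(
    "but current role has no privileges on it",
    "not allowed to access Snowflake",
)

// Hypothetical helper: run a block of JDBC work and re-raise known permission /
// allow-list errors as ConfigErrorException so the platform reports them as
// configuration problems instead of system errors.
fun <T> translateSnowflakeErrors(block: () -> T): T =
    try {
        block()
    } catch (e: SnowflakeSQLException) {
        val message = e.message.orEmpty()
        if (CONFIG_ERROR_FRAGMENTS.any { message.contains(it) }) {
            throw ConfigErrorException(message, e)
        }
        throw e
    }
```

Anything matching one of the fragments fails with a readable config error, while every other `SnowflakeSQLException` still propagates unchanged — exactly the split the parameterized test arguments encode.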
+ whenever(database.unsafeQuery(any())).thenReturn(listOf(Jsons.emptyObject()).stream()) + } + + @AfterEach + fun tearDown() { + reset(database) + } + + @Test + fun verifyUploadRecordsToStage() { + val mockFileName = "mock-file-name" + val mockFileAbsolutePath = "/tmp/$mockFileName" + val mockFile = mock { whenever(it.absolutePath).thenReturn(mockFileAbsolutePath) } + val recordBuffer = + mock() { + whenever(it.filename).thenReturn(mockFileName) + whenever(it.file).thenReturn(mockFile) + } + + val stageName = "dummy" + val stagingPath = "2024/uuid-random" + + val putQuery = stagingClient.getPutQuery(stageName, stagingPath, mockFileAbsolutePath) + val listQuery = stagingClient.getListQuery(stageName, stagingPath, mockFileName) + val stagedFile = + stagingClient.uploadRecordsToStage(recordBuffer, stageName, stagingPath) + assertEquals(stagedFile, mockFileName) + val inOrder = inOrder(database) + inOrder.verify(database).execute(putQuery) + inOrder.verify(database).unsafeQuery(listQuery) + verifyNoMoreInteractions(database) + } + + @Test + fun verifyCreateStageIfNotExists() { + val stageName = "dummy" + stagingClient.createStageIfNotExists(stageName) + val inOrder = inOrder(database) + inOrder.verify(database).execute(stagingClient.getCreateStageQuery(stageName)) + verifyNoMoreInteractions(database) + } + + @Test + fun verifyCopyIntoTableFromStage() { + val stageName = "dummy" + val stagingPath = "2024/uuid-random" + val stagedFiles = listOf("mock-file-name") + stagingClient.copyIntoTableFromStage(stageName, stagingPath, stagedFiles, streamId) + val inOrder = inOrder(database) + inOrder + .verify(database) + .execute(stagingClient.getCopyQuery(stageName, stagingPath, stagedFiles, streamId)) + verifyNoMoreInteractions(database) + } + + @Test + fun verifyDropStageIfExists() { + val stageName = "dummy" + stagingClient.dropStageIfExists(stageName) + val inOrder = inOrder(database) + inOrder.verify(database).execute(stagingClient.getDropQuery(stageName)) + verifyNoMoreInteractions(database) + } + } + + @Nested + inner class FailureTest { + + @Test + fun verifyUploadToStageRetriedOnFileNotFound() { + val database = + mock() { + doNothing().whenever(it).execute(any()) + whenever(it.unsafeQuery(any())).thenReturn(listOf().stream()) + } + val stagingClient = SnowflakeStagingClient(database) + + val mockFileName = "mock-file-name" + val mockFileAbsolutePath = "/tmp/$mockFileName" + val mockFile = + mock { file -> whenever(file.absolutePath).thenReturn(mockFileAbsolutePath) } + val recordBuffer = + mock { + whenever(it.filename).thenReturn(mockFileName) + whenever(it.file).thenReturn(mockFile) + } + val stageName = "dummy" + val stagingPath = "2024/uuid-random" + + val putQuery = stagingClient.getPutQuery(stageName, stagingPath, mockFileAbsolutePath) + val listQuery = stagingClient.getListQuery(stageName, stagingPath, mockFileName) + assertThrows(RuntimeException::class.java) { + stagingClient.uploadRecordsToStage(recordBuffer, stageName, stagingPath) + } + verify(database, times(3)).execute(putQuery) + verify(database, times(3)).unsafeQuery(listQuery) + verifyNoMoreInteractions(database) + } + + @Test + fun verifyUploadToStageRetriedOnException() { + val database = + mock() { + doThrow(SQLException("Query can't be executed")) + .whenever(it) + .execute(any()) + } + val stagingClient = SnowflakeStagingClient(database) + + val mockFileName = "mock-file-name" + val mockFileAbsolutePath = "/tmp/$mockFileName" + val mockFile = + mock { file -> whenever(file.absolutePath).thenReturn(mockFileAbsolutePath) } + 
val recordBuffer = + mock { + whenever(it.filename).thenReturn(mockFileName) + whenever(it.file).thenReturn(mockFile) + } + val stageName = "dummy" + val stagingPath = "2024/uuid-random" + + val putQuery = stagingClient.getPutQuery(stageName, stagingPath, mockFileAbsolutePath) + assertThrows(RuntimeException::class.java) { + stagingClient.uploadRecordsToStage(recordBuffer, stageName, stagingPath) + } + verify(database, times(3)).execute(putQuery) + verifyNoMoreInteractions(database) + } + + @ParameterizedTest(name = ParameterizedTest.ARGUMENTS_WITH_NAMES_PLACEHOLDER) + @MethodSource( + "io.airbyte.integrations.destination.snowflake.operation.SnowflakeStagingClientTest#argumentsForCheckKnownExceptionCaught" + ) + fun verifyKnownExceptionConvertedToConfigException( + isCaptured: Boolean, + executable: Executable + ) { + if (isCaptured) { + assertThrows(ConfigErrorException::class.java, executable) + } else { + assertThrows(SnowflakeSQLException::class.java, executable) + } + } + } + + companion object { + private const val UNKNOWN_EXCEPTION_MESSAGE = "Unknown Exception" + private const val PERMISSION_EXCEPTION_PARTIAL_MSG = + "but current role has no privileges on it" + private const val IP_NOT_IN_WHITE_LIST_EXCEPTION_PARTIAL_MSG = + "not allowed to access Snowflake" + + val streamId = + StreamId( + "final_namespace", + "final_name", + "raw_namespace", + "raw_name", + "original_namespace", + "original_name", + ) + + @JvmStatic + fun argumentsForCheckKnownExceptionCaught(): Stream { + val mockStageName = "dummy-stage-name" + val mockStagingPath = "2024/uuid-random" + val mockFileName = "mock-file-name" + return Stream.of( + Arguments.of( + false, + Executable { + getMockedStagingClientWithExceptionThrown( + UNKNOWN_EXCEPTION_MESSAGE, + ) + .createStageIfNotExists(mockStageName) + }, + ), + Arguments.of( + true, + Executable { + getMockedStagingClientWithExceptionThrown( + IP_NOT_IN_WHITE_LIST_EXCEPTION_PARTIAL_MSG, + ) + .createStageIfNotExists(mockStageName) + }, + ), + Arguments.of( + true, + Executable { + getMockedStagingClientWithExceptionThrown(PERMISSION_EXCEPTION_PARTIAL_MSG) + .createStageIfNotExists(mockStageName) + } + ), + Arguments.of( + false, + Executable { + getMockedStagingClientWithExceptionThrown( + UNKNOWN_EXCEPTION_MESSAGE, + ) + .copyIntoTableFromStage( + mockStageName, + mockStagingPath, + listOf(mockFileName), + streamId + ) + }, + ), + Arguments.of( + true, + Executable { + getMockedStagingClientWithExceptionThrown( + IP_NOT_IN_WHITE_LIST_EXCEPTION_PARTIAL_MSG, + ) + .copyIntoTableFromStage( + mockStageName, + mockStagingPath, + listOf(mockFileName), + streamId + ) + }, + ), + Arguments.of( + true, + Executable { + getMockedStagingClientWithExceptionThrown( + PERMISSION_EXCEPTION_PARTIAL_MSG, + ) + .copyIntoTableFromStage( + mockStageName, + mockStagingPath, + listOf(mockFileName), + streamId + ) + }, + ), + Arguments.of( + false, + Executable { + getMockedStagingClientWithExceptionThrown( + UNKNOWN_EXCEPTION_MESSAGE, + ) + .dropStageIfExists(mockStageName) + }, + ), + Arguments.of( + true, + Executable { + getMockedStagingClientWithExceptionThrown( + IP_NOT_IN_WHITE_LIST_EXCEPTION_PARTIAL_MSG, + ) + .dropStageIfExists(mockStageName) + }, + ), + Arguments.of( + true, + Executable { + getMockedStagingClientWithExceptionThrown( + PERMISSION_EXCEPTION_PARTIAL_MSG, + ) + .dropStageIfExists(mockStageName) + }, + ), + ) + } + + private fun getMockedStagingClientWithExceptionThrown( + exceptionMessage: String + ): SnowflakeStagingClient { + val database = + mock { + 
doThrow(SnowflakeSQLException(exceptionMessage)) + .whenever(it) + .execute(any()) + } + return SnowflakeStagingClient(database) + } + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStorageOperationTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStorageOperationTest.kt new file mode 100644 index 000000000000..f862972969ab --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/operation/SnowflakeStorageOperationTest.kt @@ -0,0 +1,109 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.integrations.destination.snowflake.operation + +import edu.umd.cs.findbugs.annotations.SuppressFBWarnings +import io.airbyte.cdk.integrations.destination.s3.csv.CsvSerializedBuffer +import io.airbyte.integrations.base.destination.typing_deduping.Sql +import io.airbyte.integrations.base.destination.typing_deduping.StreamId +import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeDestinationHandler +import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeSqlGenerator +import io.airbyte.protocol.models.v0.DestinationSyncMode +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Test +import org.mockito.Mockito.mock +import org.mockito.Mockito.reset +import org.mockito.Mockito.verifyNoMoreInteractions +import org.mockito.Mockito.`when` +import org.mockito.kotlin.any +import org.mockito.kotlin.doReturn +import org.mockito.kotlin.eq +import org.mockito.kotlin.inOrder + +@SuppressFBWarnings("BC_IMPOSSIBLE_CAST") +class SnowflakeStorageOperationTest { + private val sqlGenerator = mock(SnowflakeSqlGenerator::class.java) + private val destinationHandler = mock(SnowflakeDestinationHandler::class.java) + private val stagingClient = mock(SnowflakeStagingClient::class.java) + private val storageOperation: SnowflakeStorageOperation = + SnowflakeStorageOperation(sqlGenerator, destinationHandler, 1, stagingClient) + + @AfterEach + fun tearDown() { + reset(sqlGenerator) + reset(destinationHandler) + reset(stagingClient) + } + + @Test + fun verifyPrepareStageCreatesTableAndStage() { + val inOrder = inOrder(destinationHandler, stagingClient) + storageOperation.prepareStage(streamId, DestinationSyncMode.APPEND) + inOrder + .verify(destinationHandler) + .execute(Sql.of(storageOperation.createTableQuery(streamId))) + inOrder + .verify(stagingClient) + .createStageIfNotExists(storageOperation.getStageName(streamId)) + verifyNoMoreInteractions(destinationHandler, stagingClient) + } + + @Test + fun verifyPrepareStageOverwriteTruncatesTable() { + val inOrder = inOrder(destinationHandler, stagingClient) + storageOperation.prepareStage(streamId, DestinationSyncMode.OVERWRITE) + inOrder + .verify(destinationHandler) + .execute(Sql.of(storageOperation.createTableQuery(streamId))) + inOrder + .verify(destinationHandler) + .execute(Sql.of(storageOperation.truncateTableQuery(streamId))) + inOrder + .verify(stagingClient) + .createStageIfNotExists(storageOperation.getStageName(streamId)) + verifyNoMoreInteractions(destinationHandler, stagingClient) + } + + @Test + fun verifyWriteToStage() { + val mockTmpFileName = "random-tmp-file-name" + val data = mock(CsvSerializedBuffer::class.java) + `when`(data.filename).thenReturn(mockTmpFileName) + val stageName = 
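The `FailureTest` cases in `SnowflakeStagingClientTest` above assert that the PUT is attempted three times and that an upload only counts as successful once the file is visible via LIST, but the staging client implementation itself is not shown in this diff. Below is a hedged sketch of that retry contract; the function name, the retry-limit constant, and the `RuntimeException` wrapping are assumptions inferred from the `verify(database, times(3))` expectations.

```kotlin
import io.airbyte.cdk.db.jdbc.JdbcDatabase

private const val UPLOAD_RETRY_LIMIT = 3

// Hypothetical sketch: execute the PUT, then confirm the file landed in the stage via
// LIST; retry on either an exception or a missing file, and fail with a RuntimeException
// once the retry budget is exhausted (the behaviour the tests verify).
fun uploadWithRetries(database: JdbcDatabase, putQuery: String, listQuery: String) {
    var lastError: Exception? = null
    repeat(UPLOAD_RETRY_LIMIT) {
        try {
            database.execute(putQuery)
            // unsafeQuery returns a Stream<JsonNode>; an empty stream means the file
            // never reached the stage, so fall through and try again.
            if (database.unsafeQuery(listQuery).findAny().isPresent) {
                return
            }
        } catch (e: Exception) {
            lastError = e
        }
    }
    throw RuntimeException(
        "Failed to upload file to stage after $UPLOAD_RETRY_LIMIT attempts",
        lastError
    )
}
```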
storageOperation.getStageName(streamId) + // stagingPath has UUID which isn't injected atm. + val stagingClient = mock(SnowflakeStagingClient::class.java) + doReturn(mockTmpFileName).`when`(stagingClient).uploadRecordsToStage(any(), any(), any()) + val storageOperation = + SnowflakeStorageOperation(sqlGenerator, destinationHandler, 1, stagingClient) + + storageOperation.writeToStage(streamId, data) + val inOrder = inOrder(stagingClient) + inOrder.verify(stagingClient).uploadRecordsToStage(any(), eq(stageName), any()) + inOrder + .verify(stagingClient) + .copyIntoTableFromStage(eq(stageName), any(), eq(listOf(mockTmpFileName)), eq(streamId)) + verifyNoMoreInteractions(stagingClient) + } + + @Test + fun verifyCleanUpStage() { + storageOperation.cleanupStage(streamId) + val inOrder = inOrder(stagingClient) + inOrder.verify(stagingClient).dropStageIfExists(eq(storageOperation.getStageName(streamId))) + verifyNoMoreInteractions(stagingClient) + } + + companion object { + val streamId = + StreamId( + "final_namespace", + "final_name", + "raw_namespace", + "raw_name", + "original_namespace", + "original_name", + ) + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandlerTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandlerTest.kt new file mode 100644 index 000000000000..76eb41d75e7d --- /dev/null +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeDestinationHandlerTest.kt @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
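The `prepareStage` tests in `SnowflakeStorageOperationTest` above assert a specific ordering: create the raw table first, truncate it only for OVERWRITE syncs, then create the stage. As a rough sketch of the production-side sequence those `inOrder` verifications imply — the free-function shape and parameter names are assumptions, and the actual SQL is built by `SnowflakeStorageOperation`, not reproduced here:

```kotlin
import io.airbyte.integrations.base.destination.typing_deduping.Sql
import io.airbyte.integrations.destination.snowflake.operation.SnowflakeStagingClient
import io.airbyte.integrations.destination.snowflake.typing_deduping.SnowflakeDestinationHandler
import io.airbyte.protocol.models.v0.DestinationSyncMode

// Hypothetical sketch of the ordering the tests assert; the query strings are supplied
// by the real storage operation and passed in here for illustration only.
fun prepareStageSketch(
    destinationHandler: SnowflakeDestinationHandler,
    stagingClient: SnowflakeStagingClient,
    createTableSql: String,
    truncateTableSql: String,
    stageName: String,
    syncMode: DestinationSyncMode,
) {
    // 1. The raw table is always created first.
    destinationHandler.execute(Sql.of(createTableSql))
    // 2. OVERWRITE syncs additionally truncate previously loaded rows.
    if (syncMode == DestinationSyncMode.OVERWRITE) {
        destinationHandler.execute(Sql.of(truncateTableSql))
    }
    // 3. The stage used for bulk loading is created last, if missing.
    stagingClient.createStageIfNotExists(stageName)
}
```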
+ */ + +package io.airbyte.integrations.destination.snowflake.typing_deduping + +import io.airbyte.cdk.db.jdbc.JdbcDatabase +import io.airbyte.commons.exceptions.ConfigErrorException +import io.airbyte.commons.json.Jsons +import io.airbyte.integrations.base.destination.typing_deduping.Sql +import java.util.stream.Stream +import net.snowflake.client.jdbc.SnowflakeSQLException +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions.assertThrows +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.function.Executable +import org.junit.jupiter.params.ParameterizedTest +import org.junit.jupiter.params.provider.Arguments +import org.junit.jupiter.params.provider.MethodSource +import org.mockito.Mockito.any +import org.mockito.Mockito.anyString +import org.mockito.Mockito.doThrow +import org.mockito.Mockito.mock +import org.mockito.Mockito.reset +import org.mockito.Mockito.times +import org.mockito.Mockito.verify +import org.mockito.Mockito.`when` +import org.mockito.kotlin.eq + +class SnowflakeDestinationHandlerTest { + + private val database = mock(JdbcDatabase::class.java) + private val destinationHandler = + SnowflakeDestinationHandler("mock-database-name", database, "mock-schema") + + @ParameterizedTest + @MethodSource("argumentsForExceptionThrownWithExecute") + fun verifyExecuteKnownExceptionsAreReportedAsConfigError( + message: String, + isConfigErrorException: Boolean + ) { + doThrow(SnowflakeSQLException(message)).`when`(database).execute(any(String::class.java)) + executeAndAssertThrowable( + isConfigErrorException, + ) { + destinationHandler.execute(Sql.of("Mock SQL statement")) + } + } + private fun executeAndAssertThrowable(isConfigErrorException: Boolean, executable: Executable) { + if (isConfigErrorException) { + assertThrows(ConfigErrorException::class.java, executable) + } else { + assertThrows(RuntimeException::class.java, executable) + } + } + @Test + fun verifyCreateNamespaceChecksForSchemaExistence() { + val mockSchemaName = "mockSchema" + val showSchemasReturn = """ + {"name":"$mockSchemaName"} + """.trimIndent() + `when`(database.unsafeQuery(eq("show schemas;"))) + .thenReturn( + listOf( + Jsons.deserialize( + showSchemasReturn, + ), + ) + .stream(), + ) + destinationHandler.createNamespaces(setOf(mockSchemaName)) + // verify database.execute is not called + verify(database, times(0)).execute(anyString()) + } + + @AfterEach + fun tearDown() { + reset(database) + } + + companion object { + private const val UNKNOWN_EXCEPTION_MESSAGE = "Unknown Exception" + private const val PERMISSION_EXCEPTION_PARTIAL_MSG = + "but current role has no privileges on it" + private const val IP_NOT_IN_WHITE_LIST_EXCEPTION_PARTIAL_MSG = + "not allowed to access Snowflake" + @JvmStatic + fun argumentsForExceptionThrownWithExecute(): Stream { + return Stream.of( + Arguments.of(UNKNOWN_EXCEPTION_MESSAGE, false), + Arguments.of(PERMISSION_EXCEPTION_PARTIAL_MSG, true), + Arguments.of(IP_NOT_IN_WHITE_LIST_EXCEPTION_PARTIAL_MSG, true) + ) + } + } +} diff --git a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorTest.kt b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorTest.kt index e1d3e05b36cc..fb132f2a70cf 100644 --- 
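`verifyCreateNamespaceChecksForSchemaExistence` above asserts that `createNamespaces` reads `show schemas;` and issues no DDL when the schema already exists. A minimal sketch of that check, assuming the handler keys on the `name` field of each returned row; the `CREATE SCHEMA` statement below is illustrative, not the connector's exact SQL.

```kotlin
import com.fasterxml.jackson.databind.JsonNode
import io.airbyte.cdk.db.jdbc.JdbcDatabase
import java.util.stream.Collectors

// Hypothetical sketch of the schema-existence check the test pins down: list existing
// schemas once, then create only the namespaces that are missing.
fun createNamespacesSketch(database: JdbcDatabase, schemas: Set<String>) {
    val existing: Set<String> =
        database.unsafeQuery("show schemas;")
            .map { row: JsonNode -> row.get("name").asText() }
            .collect(Collectors.toSet())
    schemas.filterNot { it in existing }.forEach { schema ->
        // Illustrative DDL; the real handler may build this statement differently.
        database.execute("CREATE SCHEMA IF NOT EXISTS \"$schema\";")
    }
}
```

With the mocked `show schemas;` result containing the requested schema, the filter leaves nothing to create, which is why the test can assert `execute` is never called.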
a/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorTest.kt +++ b/airbyte-integrations/connectors/destination-snowflake/src/test/kotlin/io/airbyte/integrations/destination/snowflake/typing_deduping/SnowflakeSqlGeneratorTest.kt @@ -72,7 +72,7 @@ class SnowflakeSqlGeneratorTest { @Test fun columnCollision() { val parser = CatalogParser(generator) - var expectedColumns = LinkedHashMap() + val expectedColumns = LinkedHashMap() expectedColumns[ColumnId("_CURRENT_DATE", "CURRENT_DATE", "_CURRENT_DATE")] = AirbyteProtocolType.STRING expectedColumns[ColumnId("_CURRENT_DATE_1", "current_date", "_CURRENT_DATE_1")] = diff --git a/airbyte-integrations/connectors/source-bamboo-hr/README.md b/airbyte-integrations/connectors/source-bamboo-hr/README.md index 6bd34064cca5..3d9840f759b5 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/README.md +++ b/airbyte-integrations/connectors/source-bamboo-hr/README.md @@ -1,12 +1,16 @@ -# Bamboo-Hr source connector +# Bamboo Hr Source -This is the repository for the Bamboo-Hr source connector, written in Python. +This is the repository for the Bamboo HR source connector. For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/bamboo-hr). ## Local development ### Prerequisites +* Python (`^3.9`) +* Poetry (`^1.7`) - installation instructions [here](https://python-poetry.org/docs/#installation) + + - Python (~=3.9) - Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) @@ -21,7 +25,7 @@ poetry install --with dev ### Create credentials **If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/bamboo-hr) -to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_bamboo_hr/spec.yaml` file. +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `src/source_bamboo_hr/spec.yaml` file. Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. See `sample_files/sample_config.json` for a sample config file. @@ -34,12 +38,12 @@ poetry run source-bamboo-hr discover --config secrets/config.json poetry run source-bamboo-hr read --config secrets/config.json --catalog sample_files/configured_catalog.json ``` -### Running unit tests +### Running tests -To run unit tests locally, from the connector directory run: +To run tests locally, from the connector directory run: ``` -poetry run pytest unit_tests +poetry run pytest tests ``` ### Building the docker image @@ -79,7 +83,7 @@ If your connector requires to create or destroy resources for use during accepta ### Dependency Management -All of your dependencies should be managed via Poetry. +All of your dependencies should be managed via Poetry. To add a new dependency, run: ```bash @@ -94,7 +98,7 @@ You've checked out the repo, implemented a million dollar feature, and you're re 1. Make sure your changes are passing our test suite: `airbyte-ci connectors --name=source-bamboo-hr test` 2. 
Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): - - bump the `dockerImageTag` value in in `metadata.yaml` + - bump the `dockerImageTag` value in `metadata.yaml` - bump the `version` value in `pyproject.toml` 3. Make sure the `metadata.yaml` content is up to date. 4. Make sure the connector documentation and its changelog is up to date (`docs/integrations/sources/bamboo-hr.md`). diff --git a/airbyte-integrations/connectors/source-bamboo-hr/__init__.py b/airbyte-integrations/connectors/source-bamboo-hr/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-bamboo-hr/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-bamboo-hr/acceptance-test-config.yml b/airbyte-integrations/connectors/source-bamboo-hr/acceptance-test-config.yml index 62314bcc14a6..039d62b5b7c8 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-bamboo-hr/acceptance-test-config.yml @@ -1,22 +1,41 @@ # See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) # for more information about how to configure these tests connector_image: airbyte/source-bamboo-hr:dev -tests: +acceptance_tests: spec: - - spec_path: "source_bamboo_hr/spec.json" + tests: + - spec_path: "source_bamboo_hr/spec.yaml" connection: - - config_path: "secrets/config.json" - status: "succeed" - - config_path: "integration_tests/invalid_config.json" - status: "failed" + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" discovery: - - config_path: "secrets/config.json" - backward_compatibility_tests_config: - disable_for_version: "0.2.0" + tests: + - config_path: "secrets/config.json" + backward_compatibility_tests_config: + disable_for_version: "0.2.0" basic_read: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" - empty_streams: [] + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + # TODO uncomment this block to specify that the tests should assert the connector outputs the records provided in the input file a file + # expect_records: + # path: "integration_tests/expected_records.jsonl" + # extra_fields: no + # exact_order: no + # extra_records: yes + incremental: + bypass_reason: "This connector does not implement incremental sync" + # TODO uncomment this block this block if your connector implements incremental sync: + # tests: + # - config_path: "secrets/config.json" + # configured_catalog_path: "integration_tests/configured_catalog.json" + # future_state: + # future_state_path: "integration_tests/abnormal_state.json" full_refresh: - - config_path: "secrets/config.json" - configured_catalog_path: "integration_tests/configured_catalog.json" + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/__init__.py b/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/__init__.py index 
e69de29bb2d1..c941b3045795 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/__init__.py +++ b/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..52b0f2c2118f --- /dev/null +++ b/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "todo-abnormal-value" + } +} diff --git a/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/sample_config.json new file mode 100644 index 000000000000..6259faaf9e08 --- /dev/null +++ b/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "awesome-api-key", + "subdomain": "my-company" +} diff --git a/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/sample_state.json new file mode 100644 index 000000000000..3587e579822d --- /dev/null +++ b/airbyte-integrations/connectors/source-bamboo-hr/integration_tests/sample_state.json @@ -0,0 +1,5 @@ +{ + "todo-stream-name": { + "todo-field-name": "value" + } +} diff --git a/airbyte-integrations/connectors/source-bamboo-hr/metadata.yaml b/airbyte-integrations/connectors/source-bamboo-hr/metadata.yaml index 915e6db4c69c..3b6a12d5104e 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/metadata.yaml +++ b/airbyte-integrations/connectors/source-bamboo-hr/metadata.yaml @@ -1,4 +1,7 @@ data: + allowedHosts: + hosts: + - api.bamboohr.com ab_internal: ql: 200 sl: 100 @@ -7,12 +10,13 @@ data: connectorSubtype: api connectorType: source definitionId: 90916976-a132-4ce9-8bce-82a03dd58788 - dockerImageTag: 0.2.6 + dockerImageTag: 0.3.0 dockerRepository: airbyte/source-bamboo-hr documentationUrl: https://docs.airbyte.com/integrations/sources/bamboo-hr githubIssueLabel: source-bamboo-hr icon: bamboohr.svg license: MIT + releaseDate: 2021-08-27 name: BambooHR registries: cloud: @@ -27,7 +31,7 @@ data: supportLevel: community tags: - language:python - - cdk:python + - cdk:low-code connectorTestSuitesOptions: - suite: unitTests - suite: acceptanceTests diff --git a/airbyte-integrations/connectors/source-bamboo-hr/poetry.lock b/airbyte-integrations/connectors/source-bamboo-hr/poetry.lock index 255120e49aa3..fae7a4c33235 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/poetry.lock +++ b/airbyte-integrations/connectors/source-bamboo-hr/poetry.lock @@ -2,19 +2,20 @@ [[package]] name = "airbyte-cdk" -version = "0.80.0" +version = "1.1.1" description = "A framework for writing Airbyte Connectors." 
optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte_cdk-0.80.0-py3-none-any.whl", hash = "sha256:060e92323a73674fa4e9e2e4a1eb312b9b9d072c9bbe5fa28f54ef21cb4974f3"}, - {file = "airbyte_cdk-0.80.0.tar.gz", hash = "sha256:1383512a83917fecca5b24cea4c72aa5c561cf96dd464485fbcefda48fe574c5"}, + {file = "airbyte_cdk-1.1.1-py3-none-any.whl", hash = "sha256:edb8654193499ff3e4741c6bf354953d269c48c147e2f4a6184968e6aaac6302"}, + {file = "airbyte_cdk-1.1.1.tar.gz", hash = "sha256:96787fb5d3f1ca695d13cbbc0873397b479c7d6a3d3b10634b5a54ab5d6b55ef"}, ] [package.dependencies] -airbyte-protocol-models = "0.5.1" +airbyte-protocol-models = ">=0.9.0,<1.0" backoff = "*" cachetools = "*" +cryptography = ">=42.0.5,<43.0.0" Deprecated = ">=1.2,<1.3" dpath = ">=2.0.1,<2.1.0" genson = "1.2.2" @@ -22,10 +23,13 @@ isodate = ">=0.6.1,<0.7.0" Jinja2 = ">=3.1.2,<3.2.0" jsonref = ">=0.2,<0.3" jsonschema = ">=3.2.0,<3.3.0" +langchain_core = "0.1.42" pendulum = "<3.0.0" pydantic = ">=1.10.8,<2.0.0" +pyjwt = ">=2.8.0,<3.0.0" pyrate-limiter = ">=3.1.0,<3.2.0" python-dateutil = "*" +pytz = "2024.1" PyYAML = ">=6.0.1,<7.0.0" requests = "*" requests_cache = "*" @@ -34,32 +38,22 @@ wcmatch = "8.4" [package.extras] file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (>=15.0.0,<15.1.0)", "pytesseract (==0.3.10)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] sphinx-docs = ["Sphinx (>=4.2,<4.3)", "sphinx-rtd-theme (>=1.0,<1.1)"] -vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.1.16)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] [[package]] name = "airbyte-protocol-models" -version = "0.5.1" +version = "0.11.0" description = "Declares the Airbyte Protocol." optional = false python-versions = ">=3.8" files = [ - {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, - {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, + {file = "airbyte_protocol_models-0.11.0-py3-none-any.whl", hash = "sha256:2157757c1af8c13e471ab6a0304fd2f9a2a6af8cc9173937be1348a9553f7c32"}, + {file = "airbyte_protocol_models-0.11.0.tar.gz", hash = "sha256:1c7e46251b0d5a292b4aa382df24f415ac2a2a2b4719361b3c0f76368a043c23"}, ] [package.dependencies] pydantic = ">=1.9.2,<2.0.0" -[[package]] -name = "atomicwrites" -version = "1.4.1" -description = "Atomic file writes." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, -] - [[package]] name = "attrs" version = "23.2.0" @@ -148,6 +142,70 @@ files = [ {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -258,6 +316,60 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "cryptography" +version = "42.0.7" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:a987f840718078212fdf4504d0fd4c6effe34a7e4740378e59d47696e8dfb477"}, + {file = "cryptography-42.0.7-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bd13b5e9b543532453de08bcdc3cc7cebec6f9883e886fd20a92f26940fd3e7a"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79165431551042cc9d1d90e6145d5d0d3ab0f2d66326c201d9b0e7f5bf43604"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a47787a5e3649008a1102d3df55424e86606c9bae6fb77ac59afe06d234605f8"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:02c0eee2d7133bdbbc5e24441258d5d2244beb31da5ed19fbb80315f4bbbff55"}, + {file = "cryptography-42.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5e44507bf8d14b36b8389b226665d597bc0f18ea035d75b4e53c7b1ea84583cc"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7f8b25fa616d8b846aef64b15c606bb0828dbc35faf90566eb139aa9cff67af2"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:93a3209f6bb2b33e725ed08ee0991b92976dfdcf4e8b38646540674fc7508e13"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e6b8f1881dac458c34778d0a424ae5769de30544fc678eac51c1c8bb2183e9da"}, + {file = "cryptography-42.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3de9a45d3b2b7d8088c3fbf1ed4395dfeff79d07842217b38df14ef09ce1d8d7"}, + {file = "cryptography-42.0.7-cp37-abi3-win32.whl", hash = "sha256:789caea816c6704f63f6241a519bfa347f72fbd67ba28d04636b7c6b7da94b0b"}, + {file = "cryptography-42.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:8cb8ce7c3347fcf9446f201dc30e2d5a3c898d009126010cbd1f443f28b52678"}, + {file = "cryptography-42.0.7-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:a3a5ac8b56fe37f3125e5b72b61dcde43283e5370827f5233893d461b7360cd4"}, + {file = 
"cryptography-42.0.7-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:779245e13b9a6638df14641d029add5dc17edbef6ec915688f3acb9e720a5858"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d563795db98b4cd57742a78a288cdbdc9daedac29f2239793071fe114f13785"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:31adb7d06fe4383226c3e963471f6837742889b3c4caa55aac20ad951bc8ffda"}, + {file = "cryptography-42.0.7-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:efd0bf5205240182e0f13bcaea41be4fdf5c22c5129fc7ced4a0282ac86998c9"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a9bc127cdc4ecf87a5ea22a2556cab6c7eda2923f84e4f3cc588e8470ce4e42e"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3577d029bc3f4827dd5bf8bf7710cac13527b470bbf1820a3f394adb38ed7d5f"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2e47577f9b18723fa294b0ea9a17d5e53a227867a0a4904a1a076d1646d45ca1"}, + {file = "cryptography-42.0.7-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1a58839984d9cb34c855197043eaae2c187d930ca6d644612843b4fe8513c886"}, + {file = "cryptography-42.0.7-cp39-abi3-win32.whl", hash = "sha256:e6b79d0adb01aae87e8a44c2b64bc3f3fe59515280e00fb6d57a7267a2583cda"}, + {file = "cryptography-42.0.7-cp39-abi3-win_amd64.whl", hash = "sha256:16268d46086bb8ad5bf0a2b5544d8a9ed87a0e33f5e77dd3c3301e63d941a83b"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2954fccea107026512b15afb4aa664a5640cd0af630e2ee3962f2602693f0c82"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:362e7197754c231797ec45ee081f3088a27a47c6c01eff2ac83f60f85a50fe60"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4f698edacf9c9e0371112792558d2f705b5645076cc0aaae02f816a0171770fd"}, + {file = "cryptography-42.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5482e789294854c28237bba77c4c83be698be740e31a3ae5e879ee5444166582"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e9b2a6309f14c0497f348d08a065d52f3020656f675819fc405fb63bbcd26562"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d8e3098721b84392ee45af2dd554c947c32cc52f862b6a3ae982dbb90f577f14"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c65f96dad14f8528a447414125e1fc8feb2ad5a272b8f68477abbcc1ea7d94b9"}, + {file = "cryptography-42.0.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36017400817987670037fbb0324d71489b6ead6231c9604f8fc1f7d008087c68"}, + {file = "cryptography-42.0.7.tar.gz", hash = "sha256:ecbfbc00bf55888edda9868a4cf927205de8499e7fabe6c050322298382953f2"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + [[package]] name = "deprecated" version = "1.2.14" @@ -348,13 +460,13 @@ six = "*" [[package]] name = 
"jinja2" -version = "3.1.3" +version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] @@ -363,6 +475,31 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + [[package]] name = "jsonref" version = "0.2" @@ -395,6 +532,44 @@ six = ">=1.11.0" format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] +[[package]] +name = "langchain-core" +version = "0.1.42" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langchain_core-0.1.42-py3-none-any.whl", hash = "sha256:c5653ffa08a44f740295c157a24c0def4a753333f6a2c41f76bf431cd00be8b5"}, + {file = "langchain_core-0.1.42.tar.gz", hash = "sha256:40751bf60ea5d8e2b2efe65290db434717ee3834870c002e40e2811f09d814e6"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.0,<0.2.0" +packaging = ">=23.2,<24.0" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<9.0.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[[package]] +name = "langsmith" +version = "0.1.63" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
+optional = false +python-versions = "<4.0,>=3.8.1" +files = [ + {file = "langsmith-0.1.63-py3-none-any.whl", hash = "sha256:7810afdf5e3f3b472fc581a29371fb96cd843dde2149e048d1b9610325159d1e"}, + {file = "langsmith-0.1.63.tar.gz", hash = "sha256:a609405b52f6f54df442a142cbf19ab38662d54e532f96028b4c546434d4afdf"}, +] + +[package.dependencies] +orjson = ">=3.9.14,<4.0.0" +pydantic = ">=1,<3" +requests = ">=2,<3" + [[package]] name = "markupsafe" version = "2.1.5" @@ -464,15 +639,70 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "orjson" +version = "3.10.3" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"}, + {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"}, + {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"}, + {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"}, + {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"}, + {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"}, + {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"}, + {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"}, + {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"}, + {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"}, + {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"}, + {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"}, + {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"}, + {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"}, + {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"}, + {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"}, + {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"}, + {file = 
"orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"}, + {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"}, + {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"}, + {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"}, + {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"}, + {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"}, + {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"}, + {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"}, + {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"}, + {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"}, + {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"}, + {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"}, + {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"}, + {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"}, + {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"}, + {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"}, + {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"}, + {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"}, + {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"}, + {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"}, + {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"}, + {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"}, + {file = 
"orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"}, + {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"}, + {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"}, + {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"}, + {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"}, + {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"}, + {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"}, +] + [[package]] name = "packaging" -version = "24.0" +version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, - {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] [[package]] @@ -511,28 +741,29 @@ pytzdata = ">=2020.1" [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -540,31 +771,16 @@ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] - -[[package]] -name = "pybamboohr" -version = "0.8.1" -description = "A Python wrapper for the Bamboo HR API" +name = "pycparser" +version = "2.22" +description = "C parser in Python" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "PyBambooHR-0.8.1-py3-none-any.whl", hash = "sha256:31eb7f49dbc7668616401ceaa2548d64d1d147441550c96843884453d5ce932b"}, - {file = "PyBambooHR-0.8.1.tar.gz", hash = "sha256:af32b36f0049a62e8ba4bedda7223e3cab3e8be548deaddc0ff798facf16da54"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -[package.dependencies] -requests = "*" -xmltodict = "*" - [[package]] name = "pydantic" version = "1.10.15" @@ -617,6 +833,23 @@ typing-extensions = ">=4.2.0" dotenv = ["python-dotenv (>=0.10.4)"] email = ["email-validator (>=1.0.3)"] +[[package]] +name = "pyjwt" +version = "2.8.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = 
["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pyrate-limiter" version = "3.1.1" @@ -675,27 +908,25 @@ files = [ [[package]] name = "pytest" -version = "6.2.5" +version = "8.2.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, - {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, + {file = "pytest-8.2.1-py3-none-any.whl", hash = "sha256:faccc5d332b8c3719f40283d0d44aa5cf101cec36f88cde9ed8f2bc0538612b1"}, + {file = "pytest-8.2.1.tar.gz", hash = "sha256:5046e5b46d8e4cac199c373041f26be56fdb81eb4e67dc11d4e10811fc3408fd"}, ] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -toml = "*" +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-mock" @@ -728,6 +959,17 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pytzdata" version = "2020.1" @@ -801,13 +1043,13 @@ files = [ [[package]] name = "requests" -version = "2.31.0" +version = "2.32.2" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, + {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, + {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, ] [package.dependencies] @@ -869,19 +1111,18 @@ fixture = ["fixtures"] [[package]] name = "setuptools" -version = "69.5.1" +version = "70.0.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, - {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, + {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, + {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -895,25 +1136,40 @@ files = [ ] [[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" +name = "tenacity" +version = "8.3.0" +description = "Retry code until it succeeds" optional 
= false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.8" files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, + {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"}, + {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "typing-extensions" -version = "4.11.0" +version = "4.12.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, - {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, + {file = "typing_extensions-4.12.0-py3-none-any.whl", hash = "sha256:b349c66bea9016ac22978d800cfff206d5f9816951f12a7d0ec5578b0a819594"}, + {file = "typing_extensions-4.12.0.tar.gz", hash = "sha256:8cbcdc8606ebcb0d95453ad7dc5065e6237b6aa230a31e81d0f440c30fed5fd8"}, ] [[package]] @@ -1040,18 +1296,7 @@ files = [ {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, ] -[[package]] -name = "xmltodict" -version = "0.13.0" -description = "Makes working with XML feel like you are working with JSON" -optional = false -python-versions = ">=3.4" -files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, -] - [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "e681df5996cfc33be2b18f06bf7fa234d4c605bb3e5b2f17e65c75285b86a426" +content-hash = "5b0cd125a4941563e47d35d2a1603eb16db2c74dd30a0ea65043dfabe1a82cd9" diff --git a/airbyte-integrations/connectors/source-bamboo-hr/pyproject.toml b/airbyte-integrations/connectors/source-bamboo-hr/pyproject.toml index 332335c7a587..96dd63890f6c 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/pyproject.toml +++ b/airbyte-integrations/connectors/source-bamboo-hr/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "0.2.6" +version = "0.3.0" name = "source-bamboo-hr" description = "Source implementation for Bamboo Hr." 
authors = [ "Airbyte ",] @@ -12,18 +12,16 @@ readme = "README.md" documentation = "https://docs.airbyte.com/integrations/sources/bamboo-hr" homepage = "https://airbyte.com" repository = "https://github.com/airbytehq/airbyte" -[[tool.poetry.packages]] -include = "source_bamboo_hr" +packages = [ { include = "source_bamboo_hr" }, {include = "main.py" } ] [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "0.80.0" -PyBambooHR = "==0.8.1" +airbyte-cdk = "^1" [tool.poetry.scripts] source-bamboo-hr = "source_bamboo_hr.run:run" [tool.poetry.group.dev.dependencies] -pytest-mock = "^3.6.1" -requests-mock = "^1.9.3" -pytest = "^6.1" +requests-mock = "*" +pytest-mock = "*" +pytest = "*" diff --git a/airbyte-integrations/connectors/source-bamboo-hr/requirements.txt b/airbyte-integrations/connectors/source-bamboo-hr/requirements.txt deleted file mode 100644 index d6e1198b1ab1..000000000000 --- a/airbyte-integrations/connectors/source-bamboo-hr/requirements.txt +++ /dev/null @@ -1 +0,0 @@ --e . diff --git a/airbyte-integrations/connectors/source-bamboo-hr/sample_files/configured_catalog.json b/airbyte-integrations/connectors/source-bamboo-hr/sample_files/configured_catalog.json deleted file mode 100644 index 2486e6964fb1..000000000000 --- a/airbyte-integrations/connectors/source-bamboo-hr/sample_files/configured_catalog.json +++ /dev/null @@ -1,97 +0,0 @@ -{ - "streams": [ - { - "stream": { - "name": "employees_directory_stream", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "id": { - "type": ["null", "string"] - }, - "displayName": { - "type": ["null", "string"] - }, - "firstName": { - "type": ["null", "string"] - }, - "lastName": { - "type": ["null", "string"] - }, - "preferredName": { - "type": ["null", "string"] - }, - "jobTitle": { - "type": ["null", "string"] - }, - "workPhone": { - "type": ["null", "string"] - }, - "mobilePhone": { - "type": ["null", "string"] - }, - "workEmail": { - "type": ["null", "string"] - }, - "department": { - "type": ["null", "string"] - }, - "location": { - "type": ["null", "string"] - }, - "division": { - "type": ["null", "string"] - }, - "linkedIn": { - "type": ["null", "string"] - }, - "pronouns": { - "type": ["null", "string"] - }, - "workPhoneExtension": { - "type": ["null", "string"] - }, - "supervisor": { - "type": ["null", "string"] - }, - "photoUploaded": { - "type": ["null", "boolean"] - }, - "photoUrl": { - "type": ["null", "string"] - }, - "canUploadPhoto": { - "type": ["null", "boolean"] - } - } - }, - "supported_sync_modes": ["full_refresh"], - "supported_destination_sync_modes": ["overwrite", "append_dedup"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append_dedup" - }, - { - "stream": { - "name": "custom_reports_stream", - "json_schema": { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "zipcode": { - "type": ["null", "string"] - }, - "terminationDate": { - "type": ["null", "string"] - } - } - }, - "supported_sync_modes": ["full_refresh"], - "supported_destination_sync_modes": ["overwrite", "append_dedup"] - }, - "sync_mode": "full_refresh", - "destination_sync_mode": "append_dedup" - } - ] -} diff --git a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/components.py b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/components.py new file mode 100644 index 000000000000..4cc87c9a4019 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/components.py @@ -0,0 +1,59 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from dataclasses import InitVar, dataclass +from typing import Any, Mapping + +from airbyte_cdk.sources.declarative.interpolation.interpolated_string import InterpolatedString +from airbyte_cdk.sources.declarative.schema.json_file_schema_loader import JsonFileSchemaLoader, _default_file_path + + +@dataclass +class BambooHRSchemaLoader(JsonFileSchemaLoader): + + config: Mapping[str, Any] + parameters: InitVar[Mapping[str, Any]] = {"name": "custom_reports_stream"} + + def __post_init__(self, parameters: Mapping[str, Any]): + if not self.file_path: + self.file_path = _default_file_path() + self.file_path = InterpolatedString.create(self.file_path, parameters=self.parameters) + + def get_json_schema(self) -> Mapping[str, Any]: + """ + Returns the JSON schema. + + The final schema is constructed by first generating a schema for the fields + in the config and, if default fields should be included, adding these to the + schema. + """ + schema = self._get_json_schema_from_config() + if self.config.get("custom_reports_include_default_fields"): + default_schema = self._get_json_schema_from_file() + schema = self._union_schemas(default_schema, schema) + return schema + + def _get_json_schema_from_config(self): + if self.config.get("custom_reports_fields"): + properties = { + field.strip(): {"type": ["null", "string"]} + for field in self.convert_custom_reports_fields_to_list(self.config.get("custom_reports_fields", "")) + } + else: + properties = {} + return { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": properties, + } + + def convert_custom_reports_fields_to_list(self, custom_reports_fields: str) -> list: + return custom_reports_fields.split(",") if custom_reports_fields else [] + + def _get_json_schema_from_file(self): + return super().get_json_schema() + + def _union_schemas(self, schema1, schema2): + schema1["properties"] = {**schema1["properties"], **schema2["properties"]} + return schema1 diff --git a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/exception.py b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/exception.py deleted file mode 100644 index ec1c757bc428..000000000000 --- a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/exception.py +++ /dev/null @@ -1,24 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -class BambooHrError(Exception): - message = "" - - def __init__(self): - super().__init__(self.message) - - -class NullFieldsError(BambooHrError): - message = "Field `custom_reports_fields` cannot be empty if `custom_reports_include_default_fields` is false." - - -class AvailableFieldsAccessDeniedError(BambooHrError): - message = "You hasn't access to any report fields. Please check your access level." - - -class CustomFieldsAccessDeniedError(Exception): - def __init__(self, denied_fields): - self.message = f"Access to fields: {', '.join(denied_fields)} - denied. Please check your access level." 
- super().__init__(self.message) diff --git a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/manifest.yaml b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/manifest.yaml new file mode 100644 index 000000000000..26fcd6b0bc9a --- /dev/null +++ b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/manifest.yaml @@ -0,0 +1,156 @@ +version: 0.83.0 + +type: DeclarativeSource + +check: #TODO implement custom check for https://github.com/airbytehq/airbyte/blob/cc388fc6d0e8d6223bd2942e9f6466b17895dbf9/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/source.py#L145-L165 + type: CheckStream + stream_names: + - meta_fields_stream + +definitions: + streams: + custom_reports_stream: + type: DeclarativeStream + name: custom_reports_stream + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: reports/custom + http_method: POST + request_headers: + Accept: application/json + request_body_json: + title: Airbyte + fields: list(self.schema["properties"].keys()) # TODO how to get the schema properties keys as list here + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - employees + schema_loader: + type: CustomSchemaLoader + class_name: source_bamboo_hr.components.BambooHRSchemaLoader + employees_directory_stream: + type: DeclarativeStream + name: employees_directory_stream + primary_key: + - id + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: employees/directory + http_method: GET + request_headers: + Accept: application/json + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - employees + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/employees_directory_stream" + meta_fields_stream: + type: DeclarativeStream + name: meta_fields_stream + retriever: + type: SimpleRetriever + requester: + $ref: "#/definitions/base_requester" + path: meta/fields + http_method: GET + request_headers: + Accept: application/json + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: [] + schema_loader: + type: InlineSchemaLoader + schema: + $ref: "#/schemas/meta_fields_stream" + base_requester: + type: HttpRequester + url_base: https://api.bamboohr.com/api/gateway.php/{{ config['subdomain'] }}/v1/ + authenticator: + type: BasicHttpAuthenticator + password: "x" + username: '{{ config["api_key"] }}' + +streams: + - $ref: "#/definitions/streams/custom_reports_stream" + - $ref: "#/definitions/streams/employees_directory_stream" + - $ref: "#/definitions/streams/meta_fields_stream" + +spec: + type: Spec + documentation_url: https://docs.airbyte.com/integrations/sources/bamboo-hr + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + title: "Bamboo HR Spec" + type: object + additionalProperties: true + required: + - api_key + - subdomain + properties: + api_key: + type: string + order: 0 + title: api_key + description: Api key of bamboo hr + airbyte_secret: true + subdomain: + type: string + order: 1 + title: subdomain + description: Sub Domain of bamboo hr + custom_reports_fields: + type: string + order: 2 + title: custom_reports_fields + description: Comma-separated list of fields to include in custom reports. 
+ custom_reports_include_default_fields: + title: custom_reports_include_default_fields + description: >- + If true, the custom reports endpoint will include the default fields + defined here: + https://documentation.bamboohr.com/docs/list-of-field-names. + type: boolean + default: true + order: 3 + +metadata: + autoImportSchema: + custom_reports_stream: false + employees_directory_stream: false + meta_fields_stream: false + +schemas: + employees_directory_stream: + type: object + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + { + "id": { "type": ["null", "string"] }, + "type": { "type": ["null", "string"] }, + "name": { "type": ["null", "string"] }, + } + meta_fields_stream: + type: object + $schema: http://json-schema.org/draft-07/schema# + additionalProperties: true + properties: + { + "id": { "type": ["null", "string"] }, + "name": { "type": ["null", "string"] }, + "type": { "type": ["null", "string"] }, + "alias": { "type": ["null", "string"] }, + } diff --git a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/run.py b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/run.py index d9a04d56a964..9e590c98b2f0 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/run.py +++ b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/run.py @@ -6,7 +6,8 @@ import sys from airbyte_cdk.entrypoint import launch -from source_bamboo_hr import SourceBambooHr + +from .source import SourceBambooHr def run(): diff --git a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/schemas/employees_directory_stream.json b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/schemas/employees_directory_stream.json deleted file mode 100644 index 732c2f043933..000000000000 --- a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/schemas/employees_directory_stream.json +++ /dev/null @@ -1,82 +0,0 @@ -{ - "type": ["null", "object"], - "required": [], - "properties": { - "id": { - "description": "A unique identifier for the employee.", - "type": ["null", "string"] - }, - "displayName": { - "description": "The name to be displayed for the employee.", - "type": ["null", "string"] - }, - "firstName": { - "description": "The first name of the employee.", - "type": ["null", "string"] - }, - "lastName": { - "description": "The last name of the employee.", - "type": ["null", "string"] - }, - "preferredName": { - "description": "The preferred or nickname of the employee.", - "type": ["null", "string"] - }, - "jobTitle": { - "description": "The job title or role of the employee.", - "type": ["null", "string"] - }, - "workPhone": { - "description": "The work phone number of the employee.", - "type": ["null", "string"] - }, - "mobilePhone": { - "description": "The mobile phone number of the employee.", - "type": ["null", "string"] - }, - "workEmail": { - "description": "The work email address of the employee.", - "type": ["null", "string"] - }, - "department": { - "description": "The department in which the employee works.", - "type": ["null", "string"] - }, - "location": { - "description": "The physical location where the employee works.", - "type": ["null", "string"] - }, - "division": { - "description": "The division of the company to which the employee belongs.", - "type": ["null", "string"] - }, - "linkedIn": { - "description": "The LinkedIn profile URL of the employee, if available.", - "type": ["null", "string"] - }, - "pronouns": { - "description": "The 
preferred pronouns of the employee.", - "type": ["null", "string"] - }, - "workPhoneExtension": { - "description": "The extension number for the employee's work phone line.", - "type": ["null", "string"] - }, - "supervisor": { - "description": "The supervisor or manager of the employee.", - "type": ["null", "string"] - }, - "photoUploaded": { - "description": "Indicates whether a profile photo has been uploaded for the employee.", - "type": ["null", "boolean"] - }, - "photoUrl": { - "description": "The URL of the employee's profile photo.", - "type": ["null", "string"] - }, - "canUploadPhoto": { - "description": "Indicates whether the employee has permission to upload a profile photo.", - "type": ["null", "number"] - } - } -} diff --git a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/source.py b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/source.py index 66e893b53743..1441fe735953 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/source.py +++ b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/source.py @@ -2,170 +2,17 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. # +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource -import base64 -import logging -from abc import ABC -from typing import Any, Iterable, List, Mapping, Optional, Tuple +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. -import requests -from airbyte_cdk.models.airbyte_protocol import SyncMode -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream -from airbyte_cdk.sources.streams.http import HttpStream -from airbyte_cdk.sources.streams.http.requests_native_auth import TokenAuthenticator +WARNING: Do not modify this file. +""" -from .exception import AvailableFieldsAccessDeniedError, CustomFieldsAccessDeniedError, NullFieldsError -from .utils import convert_custom_reports_fields_to_list, validate_custom_fields - -class BambooHrStream(HttpStream, ABC): - def __init__(self, config: Mapping[str, Any]) -> None: - self.config = config - super().__init__(authenticator=config["authenticator"]) - - @property - def url_base(self) -> str: - return f"https://api.bamboohr.com/api/gateway.php/{self.config['subdomain']}/v1/" - - def request_headers( - self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any] = None, next_page_token: Mapping[str, Any] = None - ) -> Mapping[str, Any]: - return {"Accept": "application/json"} - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - """ - BambooHR does not support pagination. 
- """ - pass - - -class MetaFieldsStream(BambooHrStream): - primary_key = None - - def path(self, **kwargs) -> str: - return "meta/fields" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - yield from response.json() - - -class EmployeesDirectoryStream(BambooHrStream): - primary_key = "id" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - yield from response.json()["employees"] - - def path(self, **kwargs) -> str: - return "employees/directory" - - -class CustomReportsStream(BambooHrStream): - primary_key = None - - def __init__(self, *args, **kwargs): - self._schema = None - super().__init__(*args, **kwargs) - - @property - def schema(self): - if not self._schema: - self._schema = self.get_json_schema() - return self._schema - - def _get_json_schema_from_config(self): - if self.config.get("custom_reports_fields"): - properties = { - field.strip(): {"type": ["null", "string"]} - for field in convert_custom_reports_fields_to_list(self.config.get("custom_reports_fields", "")) - } - else: - properties = {} - return { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": properties, - } - - def _get_json_schema_from_file(self): - return super().get_json_schema() - - @staticmethod - def _union_schemas(schema1, schema2): - schema1["properties"] = {**schema1["properties"], **schema2["properties"]} - return schema1 - - def get_json_schema(self) -> Mapping[str, Any]: - """ - Returns the JSON schema. - - The final schema is constructed by first generating a schema for the fields - in the config and, if default fields should be included, adding these to the - schema. - """ - schema = self._get_json_schema_from_config() - if self.config.get("custom_reports_include_default_fields"): - default_schema = self._get_json_schema_from_file() - schema = self._union_schemas(default_schema, schema) - return schema - - def path(self, **kwargs) -> str: - return "reports/custom" - - @property - def http_method(self) -> str: - return "POST" - - def request_body_json(self, **kwargs) -> Optional[Mapping]: - return {"title": "Airbyte", "fields": list(self.schema["properties"].keys())} - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - yield from response.json()["employees"] - - -class SourceBambooHr(AbstractSource): - @staticmethod - def _get_authenticator(api_key): - """ - Returns a TokenAuthenticator. - - The API token is concatenated with `:x` and the resulting string is base-64 encoded. - See https://documentation.bamboohr.com/docs#authentication - """ - return TokenAuthenticator(token=base64.b64encode(f"{api_key}:x".encode("utf-8")).decode("utf-8"), auth_method="Basic") - - @staticmethod - def add_authenticator_to_config(config: Mapping[str, Any]) -> Mapping[str, Any]: - """ - Adds an authenticator entry to the config and returns the config. - """ - config["authenticator"] = SourceBambooHr._get_authenticator(config["api_key"]) - return config - - def check_connection(self, logger: logging.Logger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: - """ - Verifies the config and attempts to fetch the fields from the meta/fields endpoint. 
- """ - config = SourceBambooHr.add_authenticator_to_config(config) - - if not config.get("custom_reports_fields") and not config.get("custom_reports_include_default_fields"): - return False, NullFieldsError() - - available_fields = MetaFieldsStream(config).read_records(sync_mode=SyncMode.full_refresh) - custom_fields = convert_custom_reports_fields_to_list(config.get("custom_reports_fields", "")) - denied_fields = validate_custom_fields(custom_fields, available_fields) - - if denied_fields: - return False, CustomFieldsAccessDeniedError(denied_fields) - - try: - next(available_fields) - return True, None - except StopIteration: - return False, AvailableFieldsAccessDeniedError() - - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - config = SourceBambooHr.add_authenticator_to_config(config) - return [ - CustomReportsStream(config), - ] +# Declarative Source +class SourceBambooHr(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/spec.json b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/spec.json deleted file mode 100644 index 1c362f7fac33..000000000000 --- a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/spec.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "documentationUrl": "https://docs.airbyte.com/integrations/sources/bamboo-hr", - "connectionSpecification": { - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Bamboo HR Spec", - "type": "object", - "required": ["subdomain", "api_key"], - "additionalProperties": true, - "properties": { - "subdomain": { - "type": "string", - "description": "Sub Domain of bamboo hr" - }, - "api_key": { - "type": "string", - "description": "Api key of bamboo hr", - "airbyte_secret": true - }, - "custom_reports_fields": { - "type": "string", - "default": "", - "description": "Comma-separated list of fields to include in custom reports." - }, - "custom_reports_include_default_fields": { - "type": "boolean", - "default": true, - "description": "If true, the custom reports endpoint will include the default fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names." - } - } - } -} diff --git a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/utils.py b/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/utils.py deleted file mode 100644 index ea662129fe19..000000000000 --- a/airbyte-integrations/connectors/source-bamboo-hr/source_bamboo_hr/utils.py +++ /dev/null @@ -1,17 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -def convert_custom_reports_fields_to_list(custom_reports_fields: str) -> list: - return custom_reports_fields.split(",") if custom_reports_fields else [] - - -def validate_custom_fields(custom_fields, available_fields): - denied_fields = [] - for custom_field in custom_fields: - has_access_to_custom_field = any(available_field.get("name") == custom_field for available_field in available_fields) - if not has_access_to_custom_field: - denied_fields.append(custom_field) - - return denied_fields diff --git a/airbyte-integrations/connectors/source-bamboo-hr/unit_tests/unit_test.py b/airbyte-integrations/connectors/source-bamboo-hr/unit_tests/unit_test.py deleted file mode 100644 index b842ff138f36..000000000000 --- a/airbyte-integrations/connectors/source-bamboo-hr/unit_tests/unit_test.py +++ /dev/null @@ -1,84 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - -import pytest -from airbyte_cdk.logger import AirbyteLogger -from airbyte_cdk.models import Status -from source_bamboo_hr.source import CustomReportsStream, EmployeesDirectoryStream, SourceBambooHr - - -@pytest.fixture -def config(): - return {"api_key": "foo", "subdomain": "bar", "authenticator": "baz", "custom_reports_include_default_fields": True} - - -def test_source_bamboo_hr_client_wrong_credentials(): - source = SourceBambooHr() - result = source.check(logger=AirbyteLogger, config={"subdomain": "test", "api_key": "blah-blah"}) - assert result.status == Status.FAILED - - -@pytest.mark.parametrize( - "custom_reports_fields,custom_reports_include_default_fields,available_fields,expected_message", - [ - ( - "", - False, - {}, - "NullFieldsError('Field `custom_reports_fields` cannot be empty if `custom_reports_include_default_fields` is false.')", - ), - ("", True, {}, 'AvailableFieldsAccessDeniedError("You hasn\'t access to any report fields. Please check your access level.")'), - ( - "Test", - True, - [{"name": "NewTest"}], - "CustomFieldsAccessDeniedError('Access to fields: Test - denied. Please check your access level.')", - ), - ], -) -def test_check_failed( - config, requests_mock, custom_reports_fields, custom_reports_include_default_fields, available_fields, expected_message -): - config["custom_reports_fields"] = custom_reports_fields - config["custom_reports_include_default_fields"] = custom_reports_include_default_fields - requests_mock.get("https://api.bamboohr.com/api/gateway.php/bar/v1/meta/fields", json=available_fields) - - source = SourceBambooHr() - result = source.check(logger=AirbyteLogger, config=config) - - assert result.status == Status.FAILED - assert result.message == expected_message - - -def test_employees_directory_stream_url_base(config): - stream = EmployeesDirectoryStream(config) - assert stream.url_base == "https://api.bamboohr.com/api/gateway.php/bar/v1/" - - -def test_custom_reports_stream_get_json_schema_from_config(config): - config["custom_reports_fields"] = "one,two , three" - assert CustomReportsStream(config)._get_json_schema_from_config() == { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "one": {"type": ["null", "string"]}, - "two": {"type": ["null", "string"]}, - "three": {"type": ["null", "string"]}, - }, - } - - -def test_custom_reports_stream_union_schemas(): - schema1 = {"properties": {"one": 1, "two": 2}} - schema2 = {"properties": {"two": 2, "three": 3}} - assert CustomReportsStream._union_schemas(schema1, schema2) == {"properties": {"one": 1, "two": 2, "three": 3}} - - -def test_custom_reports_stream_request_body_json(config): - stream = CustomReportsStream(config) - stream._schema = {"properties": {"one": 1, "two": 2}} - assert stream.request_body_json() == { - "title": "Airbyte", - "fields": ["one", "two"], - } diff --git a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml index e1a561c1f805..77c6f6864432 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml +++ b/airbyte-integrations/connectors/source-declarative-manifest/metadata.yaml @@ -8,7 +8,7 @@ data: connectorType: source definitionId: 64a2f99c-542f-4af8-9a6f-355f1217b436 # This version should not be updated manually - it is updated by the CDK release workflow. 
- dockerImageTag: 1.1.1 + dockerImageTag: 1.2.1 dockerRepository: airbyte/source-declarative-manifest # This page is hidden from the docs for now, since the connector is not in any Airbyte registries. documentationUrl: https://docs.airbyte.com/integrations/sources/low-code diff --git a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock index c52537216149..69f315c1593e 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock +++ b/airbyte-integrations/connectors/source-declarative-manifest/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "airbyte-cdk" -version = "1.1.3" +version = "1.2.1" description = "A framework for writing Airbyte Connectors." optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "airbyte_cdk-1.1.3-py3-none-any.whl", hash = "sha256:d72c8a26ed41dac11b2b945b98dd81fb868f31bed150c5a2495c2dd68c61df86"}, - {file = "airbyte_cdk-1.1.3.tar.gz", hash = "sha256:8d2a331a4a61f7d7ec1ff5ba76ca5d4fd70c2e24146e4b12673568c08484dece"}, + {file = "airbyte_cdk-1.2.1-py3-none-any.whl", hash = "sha256:ca60ae569cdb8360daac2f428efb52591a34fb959ab48498a6996788ade8af24"}, + {file = "airbyte_cdk-1.2.1.tar.gz", hash = "sha256:da958afe1a08701a5db47786f865aa889003f77d4863e60384a363845ee78042"}, ] [package.dependencies] @@ -1326,4 +1326,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.9,<3.12" -content-hash = "c4115ea3f2208f9cbef829c6b3c8a7f5233005de0aabfb6654aee06da5b34588" +content-hash = "805364db35c89bd4a138c37bba962ff134a3053b67f0b1e3d4ec47ec80ba904c" diff --git a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml index 984bfff12e49..361359ce5d7e 100644 --- a/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml +++ b/airbyte-integrations/connectors/source-declarative-manifest/pyproject.toml @@ -17,7 +17,7 @@ include = "source_declarative_manifest" [tool.poetry.dependencies] python = "^3.9,<3.12" -airbyte-cdk = "1.1.3" +airbyte-cdk = "1.2.1" [tool.poetry.scripts] source-declarative-manifest = "source_declarative_manifest.run:run" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml b/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml index c1ab05885be6..dae55df93f18 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-facebook-marketing/acceptance-test-config.yml @@ -40,6 +40,8 @@ acceptance_tests: timeout_seconds: 6000 future_state: future_state_path: "integration_tests/future_state.json" + # ads_insights is currently failing because there is a lookback window and the minimum value is one day + skip_comprehensive_incremental_tests: true full_refresh: tests: - config_path: "secrets/config.json" diff --git a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json index 328659d53acc..4566a1a627e7 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/integration_tests/spec.json @@ -175,7 +175,6 @@ "catalog_segment_value_omni_purchase_roas", "catalog_segment_value_website_purchase_roas", "clicks", - "conversion_lead_rate", 
"conversion_rate_ranking", "conversion_values", "conversions", @@ -186,7 +185,6 @@ "cost_per_action_type", "cost_per_ad_click", "cost_per_conversion", - "cost_per_conversion_lead", "cost_per_dda_countby_convs", "cost_per_estimated_ad_recallers", "cost_per_inline_link_click", diff --git a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml index be51bdbd6a32..432dda39f892 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml +++ b/airbyte-integrations/connectors/source-facebook-marketing/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: e7778cfc-e97c-4458-9ecb-b4f2bba8946c - dockerImageTag: 3.0.0 + dockerImageTag: 3.1.0 dockerRepository: airbyte/source-facebook-marketing documentationUrl: https://docs.airbyte.com/integrations/sources/facebook-marketing githubIssueLabel: source-facebook-marketing @@ -47,6 +47,9 @@ data: 3.0.0: message: "There are breaking schema changes to Custom Insights Streams that use body_asset, call_to_action_asset, description_asset, image_asset, link_url_asset, title_asset or video_asset. If you use any of these streams, you will need to retest source configuration, refresh the source schema and clear affected streams after upgrading." upgradeDeadline: "2024-06-14" + 3.1.0: + message: "The `AdsInsights` Reports now don't have the possibility to fetch the root level properties `cost_per_conversion_lead` and `conversion_lead_rate`. These will therefore be removed." + upgradeDeadline: "2024-06-14" suggestedStreams: streams: - ads_insights diff --git a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml index f40d0f699897..68df9dfb0b75 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml +++ b/airbyte-integrations/connectors/source-facebook-marketing/pyproject.toml @@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api" [tool.poetry] -version = "3.0.0" +version = "3.1.0" name = "source-facebook-marketing" description = "Source implementation for Facebook Marketing." 
authors = [ "Airbyte ",] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json index 713982aac032..d1c28e2a2a0a 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/ads_insights.json @@ -104,10 +104,6 @@ "description": "Total number of clicks", "type": ["null", "integer"] }, - "conversion_lead_rate": { - "description": "Rate of leads generated from conversions", - "type": ["null", "number"] - }, "conversion_rate_ranking": { "description": "Ranking based on conversion rates", "type": ["null", "string"] @@ -148,10 +144,6 @@ "description": "Cost per conversion", "$ref": "ads_action_stats.json" }, - "cost_per_conversion_lead": { - "description": "Cost per conversion lead", - "type": ["null", "number"] - }, "cost_per_estimated_ad_recallers": { "description": "Cost per estimated ad recallers", "type": ["null", "number"] diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/shared/ads_action_stats.json b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/shared/ads_action_stats.json index 4c0a0d139837..b257ae96bc12 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/shared/ads_action_stats.json +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/schemas/shared/ads_action_stats.json @@ -32,6 +32,9 @@ }, "value": { "type": ["null", "number"] + }, + "lead": { + "type": ["null", "number"] } } } diff --git a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/spec.py b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/spec.py index 4b1b7a8a51e4..470312b4db04 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/spec.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/source_facebook_marketing/spec.py @@ -1,7 +1,6 @@ # # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
# - import logging from datetime import datetime, timezone from enum import Enum @@ -17,7 +16,12 @@ logger = logging.getLogger("airbyte") -ValidFields = Enum("ValidEnums", AdsInsights.Field.__dict__) +# Those fields were removed as they were causing the `Tried accessing nonexisting field on node type` error from Meta +# For more information, see https://github.com/airbytehq/airbyte/pull/38860 +_REMOVED_FIELDS = ["conversion_lead_rate", "cost_per_conversion_lead"] +adjusted_ads_insights_fields = {key: value for key, value in AdsInsights.Field.__dict__.items() if key not in _REMOVED_FIELDS} +ValidFields = Enum("ValidEnums", adjusted_ads_insights_fields) + ValidBreakdowns = Enum("ValidBreakdowns", AdsInsights.Breakdowns.__dict__) ValidActionBreakdowns = Enum("ValidActionBreakdowns", AdsInsights.ActionBreakdowns.__dict__) ValidCampaignStatuses = Enum("ValidCampaignStatuses", Campaign.EffectiveStatus.__dict__) diff --git a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py index f3d0426a273e..4912aee09d3b 100644 --- a/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py +++ b/airbyte-integrations/connectors/source-facebook-marketing/unit_tests/integration/test_ads_insights_action_product_id.py @@ -78,7 +78,6 @@ def _job_start_request( "catalog_segment_value_omni_purchase_roas", "catalog_segment_value_website_purchase_roas", "clicks", - "conversion_lead_rate", "conversion_rate_ranking", "conversion_values", "conversions", @@ -89,7 +88,6 @@ def _job_start_request( "cost_per_action_type", "cost_per_ad_click", "cost_per_conversion", - "cost_per_conversion_lead", "cost_per_estimated_ad_recallers", "cost_per_inline_link_click", "cost_per_inline_post_engagement", diff --git a/airbyte-integrations/connectors/source-bamboo-hr/.dockerignore b/airbyte-integrations/connectors/source-fleetio/.dockerignore similarity index 54% rename from airbyte-integrations/connectors/source-bamboo-hr/.dockerignore rename to airbyte-integrations/connectors/source-fleetio/.dockerignore index 5c87f0e881ba..590d28708316 100644 --- a/airbyte-integrations/connectors/source-bamboo-hr/.dockerignore +++ b/airbyte-integrations/connectors/source-fleetio/.dockerignore @@ -1,7 +1,6 @@ * !Dockerfile -!Dockerfile.test !main.py -!source_bamboo_hr +!source_fleetio !setup.py !secrets diff --git a/airbyte-integrations/connectors/source-fleetio/README.md b/airbyte-integrations/connectors/source-fleetio/README.md new file mode 100644 index 000000000000..7e720024bd1a --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/README.md @@ -0,0 +1,96 @@ +# Fleetio Source + +This is the repository for the Fleetio configuration-based source connector. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.com/integrations/sources/fleetio). + +## Fleetio Overview +View the Fleetio website [here](https://fleetio.com). For more information about the Fleetio API, check out the [Fleetio API Portal](https://developer.fleetio.com).
+ +## Local development + +### Prerequisites +* Python (~=3.9) +* Poetry (~=1.7) - installation instructions [here](https://python-poetry.org/docs/#installation) + + +### Installing the connector +From this connector directory, run: +```bash +poetry install --with dev +``` + +### Create credentials +**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.com/integrations/sources/fleetio) +to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_fleetio/spec.yaml` file. +Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information. +See `integration_tests/sample_config.json` for a sample config file. + +**If you are an Airbyte core member**, copy the credentials in Lastpass under the secret name `source fleetio test creds` +and place them into `secrets/config.json`. + +### Locally running the connector +``` +poetry run source-fleetio spec +poetry run source-fleetio check --config secrets/config.json +poetry run source-fleetio discover --config secrets/config.json +poetry run source-fleetio read --config secrets/config.json --catalog sample_files/configured_catalog.json +``` + +### Running unit tests +To run unit tests locally, from the connector directory run: +``` +poetry run pytest +``` + +### Building the docker image +1. Install [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) +2. Run the following command to build the docker image: +```bash +airbyte-ci connectors --name=source-fleetio build +``` + +An image will be available on your host with the tag `airbyte/source-fleetio:dev`. + +### Running as a docker container +Then run any of the connector commands as follows: +``` +docker run --rm airbyte/source-fleetio:dev spec +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-fleetio:dev check --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-fleetio:dev discover --config /secrets/config.json +docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-fleetio:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json +``` + +### Running our CI test suite +You can run our full test suite locally using [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md): +```bash +airbyte-ci connectors --name=source-fleetio test +``` + +## Testing + +#### Acceptance Tests +Customize the `acceptance-test-config.yml` file to configure tests. See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) for more information. +If your connector requires creating or destroying resources for use during acceptance tests, create fixtures for them and place them inside integration_tests/acceptance.py. + + +### Dependency Management +All of your dependencies should be managed via Poetry. +To add a new dependency, run: +```bash +poetry add <package-name> +``` + +Please commit the changes to `pyproject.toml` and `poetry.lock` files. + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests: `airbyte-ci connectors --name=source-fleetio test` +2.
Bump the connector version (please follow [semantic versioning for connectors](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#semantic-versioning-for-connectors)): + - bump the `dockerImageTag` value in `metadata.yaml` + - bump the `version` value in `pyproject.toml` +3. Make sure the `metadata.yaml` content is up to date. +4. Make sure the connector documentation and its changelog are up to date (`docs/integrations/sources/fleetio.md`). +5. Create a Pull Request: use [our PR naming conventions](https://docs.airbyte.com/contributing-to-airbyte/resources/pull-requests-handbook/#pull-request-title-convention). +6. Pat yourself on the back for being an awesome contributor. +7. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. +8. Once your PR is merged, the new version of the connector will be automatically published to Docker Hub and our connector registry. \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-fleetio/__init__.py b/airbyte-integrations/connectors/source-fleetio/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# diff --git a/airbyte-integrations/connectors/source-fleetio/acceptance-test-config.yml b/airbyte-integrations/connectors/source-fleetio/acceptance-test-config.yml new file mode 100644 index 000000000000..c3086c67da50 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/acceptance-test-config.yml @@ -0,0 +1,25 @@ +# See [Connector Acceptance Tests](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference) +# for more information about how to configure these tests +connector_image: airbyte/source-fleetio:dev +acceptance_tests: + spec: + tests: + - spec_path: "source_fleetio/spec.yaml" + connection: + tests: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + tests: + - config_path: "secrets/config.json" + basic_read: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + empty_streams: [] + full_refresh: + tests: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-fleetio/icon.svg b/airbyte-integrations/connectors/source-fleetio/icon.svg new file mode 100644 index 000000000000..3a6b8598eb88 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/icon.svg @@ -0,0 +1 @@ +fleetio-logo-mark-only \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-fleetio/integration_tests/__init__.py b/airbyte-integrations/connectors/source-fleetio/integration_tests/__init__.py new file mode 100644 index 000000000000..c941b3045795 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/integration_tests/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+# diff --git a/airbyte-integrations/connectors/source-fleetio/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-fleetio/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..19387ff587f1 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/integration_tests/abnormal_state.json @@ -0,0 +1,32 @@ +{ + "issues": { + "id": "" + }, + "service_entries": { + "id": "" + }, + "submitted_inspection_forms": { + "id": "" + }, + "vehicles": { + "id": "" + }, + "contacts": { + "id": "" + }, + "expense_entries": { + "id": "" + }, + "fuel_entries": { + "id": "" + }, + "parts": { + "id": "" + }, + "purchase_orders": { + "id": "" + }, + "vehicle_assignments": { + "id": "" + } +} diff --git a/airbyte-integrations/connectors/source-fleetio/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-fleetio/integration_tests/acceptance.py new file mode 100644 index 000000000000..9e6409236281 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/integration_tests/acceptance.py @@ -0,0 +1,16 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import pytest + +pytest_plugins = ("connector_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-fleetio/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-fleetio/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..d91b6cb561d0 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/integration_tests/configured_catalog.json @@ -0,0 +1,94 @@ +{ + "streams": [ + { + "stream": { + "name": "issues", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "service_entries", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "submitted_inspection_forms", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "vehicles", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "contacts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "expense_entries", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "fuel_entries", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "parts", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + }, + { + "stream": { + "name": "purchase_orders", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + 
"destination_sync_mode": "append" + }, + { + "stream": { + "name": "vehicle_assignments", + "json_schema": {}, + "supported_sync_modes": ["full_refresh"] + }, + "sync_mode": "full_refresh", + "destination_sync_mode": "append" + } + ] +} diff --git a/airbyte-integrations/connectors/source-fleetio/integration_tests/expected_records.jsonl b/airbyte-integrations/connectors/source-fleetio/integration_tests/expected_records.jsonl new file mode 100644 index 000000000000..fc0434c91665 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/integration_tests/expected_records.jsonl @@ -0,0 +1,10 @@ +{"stream": "contacts", "data": {"id": 0, "created_at": "2023-03-14T13:46:27-06:00", "updated_at": "2023-03-14T13:46:27-06:00", "archived_at": "2023-03-14T13:46:27-06:00", "email": "string", "name": "string", "first_name": "string", "middle_name": "string", "last_name": "string", "group_id": 0, "group_name": "string", "group_hierarchy": "Group 1|Group 2|Group 3", "technician": true, "vehicle_operator": true, "employee": true, "birth_date": "2023-03-14", "street_address": "123 Main St", "street_address_line_2": "Apt 1", "city": "string", "region": "string", "postal_code": "string", "country": "string", "employee_number": "string", "job_title": "string", "license_class": "string", "license_number": "string", "license_state": "string", "home_phone_number": "string", "mobile_phone_number": "string", "work_phone_number": "string", "other_phone_number": "string", "start_date": "2024-02-22", "leave_date": "2024-02-22", "hourly_labor_rate_cents": 0, "attachment_permissions": { "read_photos": true, "manage_photos": true, "read_documents": true, "manage_documents": true }, "default_image_url": "string", "account_membership_id": 0, "images": [ { "id": 0, "created_at": "2023-03-14T13:46:27-06:00", "updated_at": "2023-03-14T13:46:27-06:00", "imageable_id": 0, "imageable_type": "ExpenseEntry", "file_name": "string", "file_mime_type": "image/jpeg", "file_size": 0, "file_url": "string", "full_url": "string" } ], "images_count": 0}} +{"stream": "expense_entries", "data": {"created_at": "string", "custom_fields": {}, "expense_entry_type_id": 0, "expense_entry_type_name": "string", "id": 0, "notes": "string", "occurred_at": "string", "total_amount": 0, "total_amount_cents": 0, "updated_at": "string", "vehicle_id": 0, "vehicle_name": "string", "vehicle": {}, "vendor_id": 0, "vendor_name": "string", "vendor": {}}} +{"stream": "fuel_entries", "data": {"attachment_permissions": {}, "comments_count": 0, "created_at": "2023-03-14T13:46:27-06:00", "custom_fields": {}, "date": "2023-03-14T13:46:27-06:00", "external_id": "string", "fuel_type_id": 0, "fuel_type_name": "string", "kpl": "string", "documents_count": 0, "fuel_economy_units_for_current_user": "string", "id": 0, "images_count": 0, "is_sample": true, "liters": "string", "meter_entry": { "id": 0, "created_at": "2023-03-14T13:46:27-06:00", "updated_at": "2023-03-14T13:46:27-06:00", "auto_voided_at": "2023-03-14T13:46:27-06:00", "category": "starting", "meter_type": "secondary", "meterable_id": 0, "meterable_type": "FuelEntry", "value": "string", "vehicle_id": 0, "void": false, "type": "GpsMeterEntry", "date": "2023-03-14" }, "partial": true, "personal": true, "price_per_volume_unit": "string", "reference": "string", "region": "string", "reset": true, "uk_gallons_per_hr": "string", "us_gallons_per_hr": "string", "usage_in_hr": "string", "usage_in_km": "string", "usage_in_mi": "string", "liters_per_hr": "string", "lp100k": "string", "mpg_uk": "string", "mpg_us": 
"string", "total_amount": "string", "total_amount_cents": 0, "type": "string", "uk_gallons": "string", "updated_at": "string", "cost_per_hr": "string", "cost_per_km": "string", "cost_per_mi": "string", "us_gallons": "string", "vehicle_id": 0, "vehicle_name": "string", "vehicle": {}, "vendor_id": 0, "vendor": {}, "watchers_count": 0}} +{"stream": "issues", "data": {"asset_type": "string", "name": "string", "closed_at": "2023-03-14T13:46:27-06:00", "closed_by": {}, "closed_note": "string", "comments_count": 0, "created_at": "2023-03-14T13:46:27-06:00", "creation_type": "string", "description": "string", "documents_count": 0, "due_date": "2023-03-14T13:46:27-06:00", "due_meter_value": 0, "due_secondary_meter_value": 0, "equipment": {}, "attachment_permissions": {}, "external_id": 0, "fault_id": 0, "fault": 0, "type": {}, "fault_rule": {}, "id": 0, "images_count": 0, "number": "string", "reported_at": "2023-03-14T13:46:27-06:00", "reported_by": {}, "due_primary_meter_value": "string", "overdue": true, "resolvable_id": 0, "resolvable": {}, "resolvable_type": "string", "resolved_at": "string", "state": "string", "submitted_inspection_form_id": 0, "submitted_inspection_form": {}, "summary": "string", "vehicle": {}, "updated_at": "2023-03-14T13:46:27-06:00", "watchers_count": 0}} +{"stream": "parts", "data": {"attachment_permissions": {}, "comments_count": 0, "created_at": "string", "custom_fields": {}, "default_image_url": "string", "default_image_url_large": "string", "default_image_url_medium": "string", "default_image_url_small": "string", "description": "string", "documents_count": 0, "id": 0, "images_count": 0, "manufacturer_part_number": "string", "measurement_unit_id": 0, "measurement_unit_name": "string", "number": "string", "part_category": {}, "part_category_id": 0, "part_category_name": "string", "part_manufacturer": {}, "part_manufacturer_id": 0, "part_manufacturer_name": "string", "unit_cost": 0, "unit_cost_cents": 0, "average_unit_cost_cents": 0, "upc": "string", "archived_at": "2023-03-14T13:46:27-06:00", "updated_at": "2023-03-14T13:46:27-06:00"}} +{"stream": "purchase_orders", "data": {"approved_at": "2023-03-14T13:46:27-06:00", "approved_by": "string", "comments_count": 0, "created_at": "2023-03-14T13:46:27-06:00", "created_by": "string", "custom_fields": {}, "description": "string", "discount": 0, "discount_percentage": 0, "discount_type": "string", "documents_count": 0, "id": 0, "number": 0, "part_location_id": 0, "part_location_name": "string", "shipping": 0, "state": "string", "subtotal": 0, "tax_1": 0, "tax_1_percentage": "string", "tax_1_type": "string", "tax_2": 0, "tax_2_percentage": "string", "tax_2_type": "string", "total_amount": 0, "watchers_count": 0, "status_name": "string", "status_color": "string", "submitted_for_approval_at": "2023-03-14T13:46:27-06:00", "rejected_at": "2023-03-14T13:46:27-06:00", "purchased_at": "2023-03-14T13:46:27-06:00", "received_partial_at": "2023-03-14T13:46:27-06:00", "received_full_at": "2023-03-14T13:46:27-06:00", "closed_at": "2023-03-14T13:46:27-06:00", "discount_cents": 0, "tax_1_cents": 0, "tax_2_cents": 0, "subtotal_cents": 0, "shipping_cents": 0, "total_amount_cents": 0, "approved_by_id": 0, "created_by_id": 0, "type": 0, "closed_by_id": 0, "purchased_by_id": 0, "rejected_by_id": 0, "received_partial_by_id": 0, "received_full_by_id": 0, "purchase_order_status_id": 0, "submitted_for_approval_by_id": 0, "updated_at": "2023-03-14T13:46:27-06:00", "vendor_id": 0, "vendor_name": "string", "vendor": {}, "labels": []}} +{"stream": 
"service_entries", "data": {"attachment_permissions": {}, "auto_integrate_repair_order_status": "string", "invoice": {}, "warranty_credits_cents": 0, "warranty_credits_percentage": "string", "warranty_credits_type": "string", "labor_subtotal_cents": 0, "parts_subtotal_cents": 0, "subtotal_cents": 0, "discout_cents": 0, "tax_1_cents": 0, "tax_2_cents": 0, "total_amount_cents": 0, "warrant_credits_cents": 0, "comments_count": 0, "completed_at": "2023-03-14T13:46:27-06:00", "created_at": "2023-03-14T13:46:27-06:00", "discount": 0, "discount_percentage": "string", "fees_cents": 0, "discount_type": "string", "discount_cents": 0, "documents_count": 0, "general_notes": "string", "id": 0, "images_count": 0, "is_sample": 0, "labels": [], "labor_subtotal": 0, "primary_meter_entry": {}, "secondary_meter_entry": {}, "parts_subtotal": 0, "reference": "string", "started_at": "2023-03-14T13:46:27-06:00", "status": "string", "tax_1": 0, "tax_1_percentage": "string", "tax_1_type": "string", "tax_2": 0, "tax_2_percentage": "string", "tax_2_type": "string", "updated_at": "2023-03-14T13:46:27-06:00", "vehicle_id": 0, "vehicle": {}, "vendor_id": 0, "vendor_name": "string", "vendor": {}, "vmrs_repair_priority_class": {}, "work_order_id": 0, "work_order_number": 0}} +{"stream": "submitted_inspection_forms", "data": {"date": "2023-03-14T13:46:27-06:00", "failed_items": 0, "id": 0, "inspection_form": {}, "started_at": "2023-03-14T13:46:27-06:00", "starting_latitude": 0, "starting_longitude": 0, "submitted_at": "2023-03-14T13:46:27-06:00", "submitted_latitude": 0, "submitted_longitude": 0, "user": {}, "vehicle": {}}} +{"stream": "vehicle_assignments", "data": {"attachment_permissions": {}, "contact": {}, "vehicle": {}, "comments_count": 0, "contact_full_name": "string", "contact_id": 0, "started_at": "2023-03-14T13:46:27-06:00", "ended_at": "2023-03-14T13:46:27-06:00", "contact_image_url": "string", "created_at": "2023-03-14T13:46:27-06:00", "current": true, "custom_fields": {}, "future": true, "starting_meter_entry_value": "string", "ending_meter_entry_value": "string", "id": 0, "updated_at": "2023-03-14T13:46:27-06:00", "vehicle_id": 0}} +{"stream": "vehicles", "data": {"account_id": 0, "archived_at": "2023-03-14T13:46:27-06:00", "fuel_type_id": 0, "fuel_type_name": "string", "ai_enabled": true, "assetable_type": "string", "color": "string", "comments_count": 0, "created_at": "2023-03-14T13:46:27-06:00", "current_location_entry_id": 0, "default_image_url_small": "string", "external_ids": {}, "documents_count": 0, "estimated_replacement_mileage": "string", "estimated_resale_price_cents": 0, "estimated_service_months": 0, "fuel_entries_count": 0, "fuel_volume_units": "string", "group_ancestry": "string", "group_id": 0, "group_name": "string", "id": 0, "images_count": 0, "in_service_date": "string", "in_service_meter_value": "string", "is_sample": true, "issues_count": 0, "labels": [], "license_plate": "string", "make": "string", "model": "string", "name": "string", "out_of_service_date": "2023-03-14T13:46:27-06:00", "out_of_service_meter_value": "string", "ownership": "string", "primary_meter_date": "2023-03-14T13:46:27-06:00", "primary_meter_unit": "string", "primary_meter_usage_per_day": "string", "secondary_meter_unit": "string", "secondary_meter_date": "2023-03-14T13:46:27-06:00", "primary_meter_value": "string", "registration_expiration_month": "string", "secondary_meter_usage_per_day": "string", "secondary_meter_value": "string", "service_entries_count": 0, "service_reminders_count": 0, 
"system_of_measurement": "string", "trim": "string", "registration_state": "string", "updated_at": "2023-03-14T13:46:27-06:00", "vehicle_renewal_reminders_count": 0, "vehicle_status_color": "string", "vehicle_status_id": 0, "vehicle_status_name": "string", "vehicle_type_id": 0, "vehicle_type_name": "string", "vin": "string", "work_orders_count": 0, "year": 0}} \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-fleetio/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-fleetio/integration_tests/invalid_config.json new file mode 100644 index 000000000000..3ce03290c9c9 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/integration_tests/invalid_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "", + "account_token": "" +} diff --git a/airbyte-integrations/connectors/source-fleetio/integration_tests/sample_config.json b/airbyte-integrations/connectors/source-fleetio/integration_tests/sample_config.json new file mode 100644 index 000000000000..d30c1fad4657 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/integration_tests/sample_config.json @@ -0,0 +1,4 @@ +{ + "api_key": "xxx", + "account_token": "xxx" +} diff --git a/airbyte-integrations/connectors/source-fleetio/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-fleetio/integration_tests/sample_state.json new file mode 100644 index 000000000000..0967ef424bce --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/integration_tests/sample_state.json @@ -0,0 +1 @@ +{} diff --git a/airbyte-integrations/connectors/source-fleetio/main.py b/airbyte-integrations/connectors/source-fleetio/main.py new file mode 100644 index 000000000000..704b6d57eee7 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/main.py @@ -0,0 +1,9 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +from source_fleetio.run import run + +if __name__ == "__main__": + run() diff --git a/airbyte-integrations/connectors/source-fleetio/metadata.yaml b/airbyte-integrations/connectors/source-fleetio/metadata.yaml new file mode 100644 index 000000000000..de64784b3a09 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/metadata.yaml @@ -0,0 +1,29 @@ +data: + registries: + cloud: + enabled: true + oss: + enabled: true + connectorBuildOptions: + baseImage: docker.io/airbyte/python-connector-base:1.2.0@sha256:c22a9d97464b69d6ef01898edf3f8612dc11614f05a84984451dde195f337db9 + connectorSubtype: api + connectorType: source + definitionId: 13a7652d-1d94-4033-931a-613d22d3cbb3 + dockerImageTag: 0.1.0 + dockerRepository: airbyte/source-fleetio + githubIssueLabel: source-fleetio + icon: icon.svg + license: MIT + name: Fleetio + remoteRegistries: + pypi: + enabled: true + packageName: airbyte-source-fleetio + releaseDate: "2024-02-12" + releaseStage: alpha + documentationUrl: https://docs.airbyte.com/integrations/sources/fleetio + tags: + - cdk:low-code + - language:python + +metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-fleetio/poetry.lock b/airbyte-integrations/connectors/source-fleetio/poetry.lock new file mode 100644 index 000000000000..fd5eeabf8c59 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/poetry.lock @@ -0,0 +1,1052 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "airbyte-cdk" +version = "0.67.0" +description = "A framework for writing Airbyte Connectors." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte-cdk-0.67.0.tar.gz", hash = "sha256:cbbff1b3895c89313764a721870bb293a396c74bad8dd6e5c36a0c3b0a2f6a10"}, + {file = "airbyte_cdk-0.67.0-py3-none-any.whl", hash = "sha256:2082c859536a2450c03b89dba1bbdab21bad314fbf5ef6d2e86fefc4ba935373"}, +] + +[package.dependencies] +airbyte-protocol-models = "0.5.1" +backoff = "*" +cachetools = "*" +Deprecated = ">=1.2,<2.0" +dpath = ">=2.0.1,<2.1.0" +genson = "1.2.2" +isodate = ">=0.6.1,<0.7.0" +Jinja2 = ">=3.1.2,<3.2.0" +jsonref = ">=0.2,<1.0" +jsonschema = ">=3.2.0,<3.3.0" +pendulum = "<3.0.0" +pydantic = ">=1.10.8,<2.0.0" +pyrate-limiter = ">=3.1.0,<3.2.0" +python-dateutil = "*" +PyYAML = ">=6.0.1" +requests = "*" +requests-cache = "*" +wcmatch = "8.4" + +[package.extras] +dev = ["avro (>=1.11.2,<1.12.0)", "cohere (==4.21)", "fastavro (>=1.8.0,<1.9.0)", "freezegun", "langchain (==0.0.271)", "markdown", "mypy", "openai[embeddings] (==0.27.9)", "pandas (==2.0.3)", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "pytest", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests-mock", "tiktoken (==0.4.0)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +file-based = ["avro (>=1.11.2,<1.12.0)", "fastavro (>=1.8.0,<1.9.0)", "markdown", "pdf2image (==1.16.3)", "pdfminer.six (==20221105)", "pyarrow (==12.0.1)", "pytesseract (==0.3.10)", "unstructured (==0.10.27)", "unstructured.pytesseract (>=0.3.12)", "unstructured[docx,pptx] (==0.10.27)"] +sphinx-docs = ["Sphinx (>=4.2,<5.0)", "sphinx-rtd-theme (>=1.0,<2.0)"] +vector-db-based = ["cohere (==4.21)", "langchain (==0.0.271)", "openai[embeddings] (==0.27.9)", "tiktoken (==0.4.0)"] + +[[package]] +name = "airbyte-protocol-models" +version = "0.5.1" +description = "Declares the Airbyte Protocol." +optional = false +python-versions = ">=3.8" +files = [ + {file = "airbyte_protocol_models-0.5.1-py3-none-any.whl", hash = "sha256:dfe84e130e51ce2ae81a06d5aa36f6c5ce3152b9e36e6f0195fad6c3dab0927e"}, + {file = "airbyte_protocol_models-0.5.1.tar.gz", hash = "sha256:7c8b16c7c1c7956b1996052e40585a3a93b1e44cb509c4e97c1ee4fe507ea086"}, +] + +[package.dependencies] +pydantic = ">=1.9.2,<2.0.0" + +[[package]] +name = "atomicwrites" +version = "1.4.1" +description = "Atomic file writes." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, +] + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "cattrs" +version = "23.2.3" +description = "Composable complex class support for attrs and dataclasses." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cattrs-23.2.3-py3-none-any.whl", hash = "sha256:0341994d94971052e9ee70662542699a3162ea1e0c62f7ce1b4a57f563685108"}, + {file = "cattrs-23.2.3.tar.gz", hash = "sha256:a934090d95abaa9e911dac357e3a8699e0b4b14f8529bcc7d2b1ad9d51672b9f"}, +] + +[package.dependencies] +attrs = ">=23.1.0" +exceptiongroup = {version = ">=1.1.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.1.0,<4.6.3 || >4.6.3", markers = "python_version < \"3.11\""} + +[package.extras] +bson = ["pymongo (>=4.4.0)"] +cbor2 = ["cbor2 (>=5.4.6)"] +msgpack = ["msgpack (>=1.0.5)"] +orjson = ["orjson (>=3.9.2)"] +pyyaml = ["pyyaml (>=6.0)"] +tomlkit = ["tomlkit (>=0.11.8)"] +ujson = ["ujson (>=5.7.0)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecated" +version = "1.2.14" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.14-py2.py3-none-any.whl", hash = "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c"}, + {file = "Deprecated-1.2.14.tar.gz", hash = "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] + +[[package]] +name = "dpath" +version = "2.0.8" +description = "Filesystem-like pathing and searching for dictionaries" +optional = false +python-versions = ">=3.7" +files = [ + {file = "dpath-2.0.8-py3-none-any.whl", hash = "sha256:f92f595214dd93a00558d75d4b858beee519f4cffca87f02616ad6cd013f3436"}, + {file = "dpath-2.0.8.tar.gz", hash = "sha256:a3440157ebe80d0a3ad794f1b61c571bef125214800ffdb9afc9424e8250fe9b"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "genson" +version = "1.2.2" +description = "GenSON is a powerful, user-friendly JSON Schema generator." 
+optional = false +python-versions = "*" +files = [ + {file = "genson-1.2.2.tar.gz", hash = "sha256:8caf69aa10af7aee0e1a1351d1d06801f4696e005f06cedef438635384346a16"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonref" +version = "0.3.0" +description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." +optional = false +python-versions = ">=3.3,<4.0" +files = [ + {file = "jsonref-0.3.0-py3-none-any.whl", hash = "sha256:9480ad1b500f7e795daeb0ef29f9c55ae3a9ab38fb8d6659b6f4868acb5a5bc8"}, + {file = "jsonref-0.3.0.tar.gz", hash = "sha256:68b330c6815dc0d490dbb3d65ccda265ddde9f7856fd2f3322f971d456ea7549"}, +] + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = "*" +files = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0" +setuptools = "*" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pendulum" +version = "2.1.2" +description = "Python datetimes made easy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, + {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, + {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, + {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, + {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, + {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, + {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, + {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, + {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, + {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, + {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, + {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, + {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, + {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, + {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, + {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, +] + +[package.dependencies] +python-dateutil = ">=2.6,<3.0" +pytzdata = ">=2020.1" + +[[package]] +name = "platformdirs" +version = "4.2.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] + +[[package]] +name = "pydantic" +version = "1.10.14" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, + {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, + {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, + {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, + {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, + {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, + {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, + {file = 
"pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, + {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, + {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, + {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, + {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, + {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, + {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, + {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, + {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, + {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, + {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, + {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, + {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, + {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, + {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = 
"sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, + {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, + {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pyrate-limiter" +version = "3.1.1" +description = "Python Rate-Limiter using Leaky-Bucket Algorithm" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "pyrate_limiter-3.1.1-py3-none-any.whl", hash = "sha256:c51906f1d51d56dc992ff6c26e8300e32151bc6cfa3e6559792e31971dfd4e2b"}, + {file = "pyrate_limiter-3.1.1.tar.gz", hash = "sha256:2f57eda712687e6eccddf6afe8f8a15b409b97ed675fe64a626058f12863b7b7"}, +] + +[package.extras] +all = ["filelock (>=3.0)", "redis (>=5.0.0,<6.0.0)"] +docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0,<5.0.0)", "sphinx-autodoc-typehints (>=1.17,<2.0)", "sphinx-copybutton (>=0.5)", "sphinxcontrib-apidoc (>=0.3,<0.4)"] + +[[package]] +name = "pyrsistent" +version = "0.20.0" +description = "Persistent/Functional/Immutable data structures" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, + {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, + {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, + {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, + {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, + {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, + {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, + {file = 
"pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, + {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, + {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, + {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, + {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, + {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, + {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, + {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, + {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, + {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, + {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, +] + +[[package]] +name = "pytest" +version = "6.2.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama 
= {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytzdata" +version = "2020.1" +description = "The Olson timezone database for Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, + {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-cache" +version = "1.2.0" +description = "A persistent cache for python requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "requests_cache-1.2.0-py3-none-any.whl", hash = "sha256:490324301bf0cb924ff4e6324bd2613453e7e1f847353928b08adb0fdfb7f722"}, + {file = "requests_cache-1.2.0.tar.gz", hash = "sha256:db1c709ca343cc1cd5b6c8b1a5387298eceed02306a6040760db538c885e3838"}, +] + +[package.dependencies] +attrs = ">=21.2" +cattrs = ">=22.2" +platformdirs = ">=2.5" +requests = ">=2.22" +url-normalize = ">=1.4" +urllib3 = ">=1.25.5" + +[package.extras] +all = ["boto3 (>=1.15)", "botocore (>=1.18)", "itsdangerous (>=2.0)", "pymongo (>=3)", "pyyaml (>=6.0.1)", "redis (>=3)", "ujson (>=5.4)"] +bson = ["bson (>=0.5)"] +docs = ["furo (>=2023.3,<2024.0)", "linkify-it-py (>=2.0,<3.0)", "myst-parser (>=1.0,<2.0)", "sphinx (>=5.0.2,<6.0.0)", "sphinx-autodoc-typehints (>=1.19)", "sphinx-automodapi (>=0.14)", "sphinx-copybutton (>=0.5)", "sphinx-design (>=0.2)", "sphinx-notfound-page (>=0.8)", "sphinxcontrib-apidoc (>=0.3)", "sphinxext-opengraph (>=0.9)"] +dynamodb = ["boto3 (>=1.15)", "botocore (>=1.18)"] +json = ["ujson (>=5.4)"] +mongodb = ["pymongo (>=3)"] +redis = ["redis (>=3)"] +security = ["itsdangerous (>=2.0)"] +yaml = ["pyyaml (>=6.0.1)"] + +[[package]] +name = "requests-mock" +version = "1.11.0" +description = "Mock out responses from the requests package" +optional = false +python-versions = "*" +files = [ + {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, + {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, +] + +[package.dependencies] +requests = ">=2.3,<3" +six = "*" + +[package.extras] +fixture = ["fixtures"] +test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] + +[[package]] +name = "responses" +version = "0.19.0" +description = "A utility library for mocking out the `requests` Python library." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "responses-0.19.0-py3-none-any.whl", hash = "sha256:53354b5de163aa2074312c71d8ebccb8bd1ab336cff7053abb75e84dc5637abe"}, + {file = "responses-0.19.0.tar.gz", hash = "sha256:3fc29c3117e14136b833a0a6d4e7f1217c6301bf08b6086db468e12f1e3290e2"}, +] + +[package.dependencies] +requests = ">=2.0,<3.0" +urllib3 = ">=1.25.10" + +[package.extras] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-localserver", "types-mock", "types-requests"] + +[[package]] +name = "setuptools" +version = "69.1.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.10.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, +] + +[[package]] +name = "url-normalize" +version = "1.4.3" +description = "URL normalization for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, 
!=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "url-normalize-1.4.3.tar.gz", hash = "sha256:d23d3a070ac52a67b83a1c59a0e68f8608d1cd538783b401bc9de2c0fac999b2"}, + {file = "url_normalize-1.4.3-py2.py3-none-any.whl", hash = "sha256:ec3c301f04e5bb676d333a7fa162fa977ad2ca04b7e652bfc9fac4e405728eed"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcmatch" +version = "8.4" +description = "Wildcard/glob file name matcher." +optional = false +python-versions = ">=3.7" +files = [ + {file = "wcmatch-8.4-py3-none-any.whl", hash = "sha256:dc7351e5a7f8bbf4c6828d51ad20c1770113f5f3fd3dfe2a03cfde2a63f03f98"}, + {file = "wcmatch-8.4.tar.gz", hash = "sha256:ba4fc5558f8946bf1ffc7034b05b814d825d694112499c86035e0e4d398b6a67"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + +[[package]] +name = "wrapt" +version = "1.16.0" +description = "Module for decorators, wrappers and monkey patching." +optional = false +python-versions = ">=3.6" +files = [ + {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, + {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, + {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, + {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, + {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, + {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, + {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, + {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, + {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, + {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, + {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, + {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, + {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, + {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, + {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, + {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, + {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, + {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = 
"sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, + {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, + {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, + {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, + {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, + {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, + {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, + {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, + {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, + {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, + {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, + {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, + {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, + {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, + {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, + {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, + {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, + {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, + {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, + {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9,<3.12" +content-hash = "d6396b18d4d035c4147663f83fb99b6afd3955fe58a739c014dd4fea75c01d73" diff --git a/airbyte-integrations/connectors/source-fleetio/pyproject.toml b/airbyte-integrations/connectors/source-fleetio/pyproject.toml new file mode 100644 index 000000000000..e535c0a3e204 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/pyproject.toml @@ -0,0 +1,25 @@ +[tool.poetry] +version = "0.1.0" +name = "source-fleetio" +description = "Source implementation for Fleetio." +authors = ["John Michael Mizerany "] +license = "MIT" +readme = "README.md" +documentation = "https://docs.airbyte.com/integrations/sources/fleetio" +homepage = "https://airbyte.com" +repository = "https://github.com/airbytehq/airbyte" +[[tool.poetry.packages]] +include = "source_fleetio" + +[tool.poetry.dependencies] +python = "^3.9,<3.12" +airbyte-cdk = "==0.67.0" +pytest = "==6.2.5" + +[tool.poetry.scripts] +source-fleetio = "source_fleetio.run:run" + +[tool.poetry.group.dev.dependencies] +pytest-mock = "^3.6.1" +responses = "^0.19.0" +requests-mock = "^1.9.3" \ No newline at end of file diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/__init__.py b/airbyte-integrations/connectors/source-fleetio/source_fleetio/__init__.py new file mode 100644 index 000000000000..11b4e8675c63 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/__init__.py @@ -0,0 +1,8 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+# + + +from .source import SourceFleetio + +__all__ = ["SourceFleetio"] diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/manifest.yaml b/airbyte-integrations/connectors/source-fleetio/source_fleetio/manifest.yaml new file mode 100644 index 000000000000..54340b77a045 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/manifest.yaml @@ -0,0 +1,430 @@ +version: 0.40.5 +type: DeclarativeSource +check: + type: CheckStream + stream_names: + - submitted_inspection_forms +streams: + - name: submitted_inspection_forms + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v1/submitted_inspection_forms + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: issues + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v2/issues + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: service_entries + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v2/service_entries + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: vehicles + type: DeclarativeStream + retriever: + type: 
SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v1/vehicles + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: expense_entries + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v1/expense_entries + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: contacts + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v2/contacts + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: fuel_entries + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v1/fuel_entries + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + 
X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: parts + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v1/parts + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: purchase_orders + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v1/purchase_orders + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id + - name: vehicle_assignments + type: DeclarativeStream + retriever: + type: SimpleRetriever + paginator: + type: DefaultPaginator + page_token_option: + type: RequestOption + field_name: start_cursor + inject_into: request_parameter + pagination_strategy: + type: CursorPagination + cursor_value: '{{ response.get("next_cursor", {}) }}' + stop_condition: '{{ not response.get("next_cursor", {}) }}' + requester: + path: /v1/vehicle_assignments + type: HttpRequester + url_base: https://secure.fleetio.com/api/ + http_method: GET + request_parameters: + per_page: "100" + authenticator: + type: NoAuth + request_headers: + Account-Token: "{{ config['account_token'] }}" + Authorization: Token {{ config['api_key'] }} + X-Api-Version: "2024-03-15" + X-Client-Name: data_connector + X-Client-Platform: fleetio_airbyte + request_body_json: {} + record_selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: + - records + partition_router: [] + primary_key: + - id +spec: + connection_specification: + $schema: http://json-schema.org/draft-07/schema# + type: object + required: + - api_key + - account_token + properties: + api_key: + type: string + order: 0 + title: api_key + airbyte_secret: true + account_token: + type: string + order: 1 + title: account_token + airbyte_secret: true + additionalProperties: true + 
documentation_url: https://docs.airbyte.com/integrations/sources/fleetio + type: Spec +metadata: + autoImportSchema: + submitted_inspection_forms: true + issues: true + service_entries: true + vehicles: true + expense_entries: true + contacts: true + fuel_entries: true + parts: true + purchase_orders: true + vehicle_assignments: true diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/run.py b/airbyte-integrations/connectors/source-fleetio/source_fleetio/run.py new file mode 100644 index 000000000000..6a80203c2bfc --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/run.py @@ -0,0 +1,14 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_fleetio import SourceFleetio + + +def run(): + source = SourceFleetio() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/contacts.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/contacts.json new file mode 100644 index 000000000000..3ef0cd8c66e0 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/contacts.json @@ -0,0 +1,130 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": true, + "properties": { + "comments_count": { + "type": ["number", "null"] + }, + "created_at": { + "type": ["string", "null"] + }, + "custom_fields": { + "type": ["object", "null"] + }, + "documents_count": { + "type": ["number", "null"] + }, + "email": { + "type": ["string", "null"] + }, + "employee": { + "type": ["boolean", "null"] + }, + "first_name": { + "type": ["string", "null"] + }, + "middle_name": { + "type": ["string", "null"] + }, + "group_id": { + "type": ["number", "null"] + }, + "group_name": { + "type": ["string", "null"] + }, + "group_hierarchy": { + "type": ["string", "null"] + }, + "id": { + "type": "number" + }, + "images_count": { + "type": ["number", "null"] + }, + "last_name": { + "type": ["string", "null"] + }, + "name": { + "type": ["string", "null"] + }, + "technician": { + "type": ["boolean", "null"] + }, + "updated_at": { + "type": "string" + }, + "vehicle_operator": { + "type": ["boolean", "null"] + }, + "birth_date": { + "type": ["string", "null"] + }, + "street_address": { + "type": ["string", "null"] + }, + "street_address_line_2": { + "type": ["string", "null"] + }, + "city": { + "type": ["string", "null"] + }, + "country": { + "type": ["string", "null"] + }, + "employee_number": { + "type": ["string", "null"] + }, + "home_phone_number": { + "type": ["string", "null"] + }, + "job_title": { + "type": ["string", "null"] + }, + "leave_date": { + "type": ["string", "null"] + }, + "license_class": { + "type": ["string", "null"] + }, + "license_plate": { + "type": ["string", "null"] + }, + "license_number": { + "type": ["string", "null"] + }, + "license_state": { + "type": ["string", "null"] + }, + "mobile_phone_number": { + "type": ["string", "null"] + }, + "other_phone_number": { + "type": ["string", "null"] + }, + "postal_code": { + "type": ["string", "null"] + }, + "region": { + "type": ["string", "null"] + }, + "start_date": { + "type": ["string", "null"] + }, + "work_phone_number": { + "type": ["string", "null"] + }, + "hourly_labor_rate_cents": { + "type": ["number", "null"] + }, + "attachment_permissions": { + "type": ["object", "null"] + }, + "default_image_url": { + "type": ["string", "null"] + }, + "account_membership_id": { + "type": ["number", "null"] 
+ } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/expense_entries.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/expense_entries.json new file mode 100644 index 000000000000..b9b986ef8535 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/expense_entries.json @@ -0,0 +1,55 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": true, + "properties": { + "created_at": { + "type": "string" + }, + "custom_fields": { + "type": ["object", "null"] + }, + "expense_entry_type_id": { + "type": ["number", "null"] + }, + "expense_entry_type_name": { + "type": ["string", "null"] + }, + "id": { + "type": "number" + }, + "notes": { + "type": ["string", "null"] + }, + "occurred_at": { + "type": ["string", "null"] + }, + "total_amount": { + "type": ["number", "null"] + }, + "total_amount_cents": { + "type": ["number", "null"] + }, + "updated_at": { + "type": "string" + }, + "vehicle_id": { + "type": ["number", "null"] + }, + "vehicle_name": { + "type": ["string", "null"] + }, + "vehicle": { + "type": ["object", "null"] + }, + "vendor_id": { + "type": ["number", "null"] + }, + "vendor_name": { + "type": ["string", "null"] + }, + "vendor": { + "type": ["object", "null"] + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/fuel_entries.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/fuel_entries.json new file mode 100644 index 000000000000..762dd56b7a42 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/fuel_entries.json @@ -0,0 +1,183 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": true, + "properties": { + "attachment_permissions": { + "type": "object" + }, + "comments_count": { + "type": ["number", "null"] + }, + "created_at": { + "type": ["string", "null"] + }, + "custom_fields": { + "type": ["object", "null"] + }, + "date": { + "type": ["string", "null"] + }, + "external_id": { + "type": ["string", "null"] + }, + "fuel_type_id": { + "type": ["number", "null"] + }, + "fuel_type_name": { + "type": ["string", "null"] + }, + "kpl": { + "type": ["string", "null"] + }, + "documents_count": { + "type": ["number", "null"] + }, + "fuel_economy_units_for_current_user": { + "type": ["string", "null"] + }, + "id": { + "type": "number" + }, + "images_count": { + "type": ["number", "null"] + }, + "is_sample": { + "type": "boolean" + }, + "liters": { + "type": ["string", "null"] + }, + "meter_entry": { + "properties": { + "created_at": { + "type": ["string", "null"] + }, + "date": { + "type": ["string", "null"] + }, + "id": { + "type": ["number", "null"] + }, + "meterable_id": { + "type": ["number", "null"] + }, + "meterable_type": { + "type": ["string", "null"] + }, + "updated_at": { + "type": ["string", "null"] + }, + "value": { + "type": ["string", "null"] + }, + "vehicle_id": { + "type": ["number", "null"] + }, + "void": { + "type": "boolean" + }, + "auto_voided_at": { + "type": ["string", "null"] + }, + "category": { + "type": ["string", "null"] + }, + "meter_type": { + "type": ["string", "null"] + }, + "type": { + "type": ["string", "null"] + } + }, + "type": "object" + }, + "partial": { + "type": ["boolean", "null"] + }, + "personal": { + "type": ["boolean", "null"] + }, + "price_per_volume_unit": { + "type": ["string", "null"] + }, + "reference": { + "type": ["string", "null"] + }, + "region": { + 
"type": ["string", "null"] + }, + "reset": { + "type": ["boolean", "null"] + }, + "uk_gallons_per_hr": { + "type": ["string", "null"] + }, + "us_gallons_per_hr": { + "type": ["string", "null"] + }, + "usage_in_hr": { + "type": ["string", "null"] + }, + "usage_in_km": { + "type": ["string", "null"] + }, + "usage_in_mi": { + "type": ["string", "null"] + }, + "liters_per_hr": { + "type": ["string", "null"] + }, + "lp100k": { + "type": ["string", "null"] + }, + "mpg_uk": { + "type": ["string", "null"] + }, + "mpg_us": { + "type": ["string", "null"] + }, + "total_amount": { + "type": ["string", "null"] + }, + "total_amount_cents": { + "type": ["number", "null"] + }, + "uk_gallons": { + "type": ["string", "null"] + }, + "updated_at": { + "type": ["string", "null"] + }, + "cost_per_hr": { + "type": ["string", "null"] + }, + "cost_per_km": { + "type": ["string", "null"] + }, + "cost_per_mi": { + "type": ["string", "null"] + }, + "us_gallons": { + "type": ["string", "null"] + }, + "vehicle_id": { + "type": ["number", "null"] + }, + "vehicle_name": { + "type": ["string", "null"] + }, + "vehicle": { + "type": ["object", "null"] + }, + "vendor_id": { + "type": ["number", "null"] + }, + "vendor": { + "type": ["object", "null"] + }, + "watchers_count": { + "type": ["number", "null"] + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/issues.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/issues.json new file mode 100644 index 000000000000..9a37dd0e771a --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/issues.json @@ -0,0 +1,148 @@ +{ + "type": "object", + "additionalProperties": true, + "properties": { + "asset_type": { + "type": ["string", "null"] + }, + "name": { + "type": ["string", "null"] + }, + "closed_at": { + "type": ["null", "string"] + }, + "closed_by": { + "properties": { + "id": { + "type": "number" + } + }, + "type": ["object", "null"] + }, + "closed_note": { + "type": ["string", "null"] + }, + "comments_count": { + "type": ["number", "null"] + }, + "created_at": { + "type": ["string"] + }, + "creation_type": { + "type": ["string", "null"] + }, + "description": { + "type": ["string", "null"] + }, + "documents_count": { + "type": "number" + }, + "due_date": { + "type": ["null", "string"] + }, + "due_meter_value": { + "type": ["null", "number"] + }, + "due_secondary_meter_value": { + "type": ["null", "number", "string"] + }, + "equipment": { + "type": ["null", "object"] + }, + "attachment_permissions": { + "type": ["null", "object"] + }, + "external_id": { + "type": ["null", "number"] + }, + "fault_id": { + "type": ["null", "number"] + }, + "fault": { + "properties": { + "id": { + "type": "number" + } + }, + "type": ["object", "null"] + }, + "fault_rule": { + "type": ["null", "object"] + }, + "id": { + "type": "number" + }, + "images_count": { + "type": "number" + }, + "number": { + "type": ["null", "string"] + }, + "reported_at": { + "type": "string" + }, + "reported_by": { + "properties": { + "id": { + "type": "number" + }, + "name": { + "type": ["null", "string"] + }, + "default_image_url": { + "type": ["null", "string"] + } + }, + "type": ["object", "null"] + }, + "due_primary_meter_value": { + "type": ["string", "null"] + }, + "overdue": { + "type": ["boolean", "null"] + }, + "resolvable_id": { + "type": ["number", "null"] + }, + "resolvable": { + "properties": { + "id": { + "type": "number" + } + }, + "type": ["object", "null"] + }, + "resolvable_type": { + 
"type": ["null", "string"] + }, + "resolved_at": { + "type": ["null", "string"] + }, + "state": { + "type": "string" + }, + "submitted_inspection_form_id": { + "type": ["null", "number"] + }, + "submitted_inspection_form": { + "properties": { + "id": { + "type": "number" + } + }, + "type": ["object", "null"] + }, + "summary": { + "type": ["string", "null"] + }, + "vehicle": { + "type": ["null", "object"] + }, + "updated_at": { + "type": ["string", "null"] + }, + "watchers_count": { + "type": ["number", "null"] + } + } +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/parts.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/parts.json new file mode 100644 index 000000000000..fb6866f901ab --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/parts.json @@ -0,0 +1,120 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": true, + "properties": { + "attachment_permissions": { + "type": "object" + }, + "comments_count": { + "type": "number" + }, + "created_at": { + "type": "string" + }, + "custom_fields": { + "type": ["object", "null"] + }, + "default_image_url": { + "type": ["null", "string"] + }, + "default_image_url_large": { + "type": ["null", "string"] + }, + "default_image_url_medium": { + "type": ["null", "string"] + }, + "default_image_url_small": { + "type": ["null", "string"] + }, + "description": { + "type": ["string", "null"] + }, + "documents_count": { + "type": ["number", "null"] + }, + "id": { + "type": "number" + }, + "images_count": { + "type": ["number", "null"] + }, + "manufacturer_part_number": { + "type": ["null", "string"] + }, + "measurement_unit_id": { + "type": ["null", "number"] + }, + "measurement_unit_name": { + "type": ["null", "string"] + }, + "number": { + "type": "string" + }, + "part_category": { + "properties": { + "id": { + "type": "number" + }, + "tire_category": { + "type": ["boolean", "null"] + }, + "account_id": { + "type": ["number", "null"] + }, + "default": { + "type": ["boolean", "null"] + }, + "name": { + "type": ["string", "null"] + }, + "description": { + "type": ["string", "null"] + }, + "tires_count": { + "type": ["string", "number", "null"] + }, + "created_at": { + "type": ["string", "null"] + }, + "updated_at": { + "type": ["string", "null"] + } + }, + "type": ["object", "null"] + }, + "part_category_id": { + "type": ["null", "number"] + }, + "part_category_name": { + "type": ["null", "string"] + }, + "part_manufacturer": { + "type": ["object", "null"] + }, + "part_manufacturer_id": { + "type": ["null", "number"] + }, + "part_manufacturer_name": { + "type": ["null", "string"] + }, + "unit_cost": { + "type": ["null", "number"] + }, + "unit_cost_cents": { + "type": ["string", "number", "null"] + }, + "average_unit_cost_cents": { + "type": ["string", "number", "null"] + }, + "upc": { + "type": ["null", "string"] + }, + "archived_at": { + "type": ["string", "null"] + }, + "updated_at": { + "type": ["string"] + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/purchase_orders.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/purchase_orders.json new file mode 100644 index 000000000000..4afdc155acac --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/purchase_orders.json @@ -0,0 +1,169 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": true, + "properties": { + "approved_at": { + 
"type": ["string", "null"] + }, + "approved_by": { + "type": ["string", "null"] + }, + "comments_count": { + "type": ["number", "null"] + }, + "created_at": { + "type": "string" + }, + "created_by": { + "type": ["string", "null"] + }, + "custom_fields": { + "type": ["object", "null"] + }, + "description": { + "type": ["string", "null"] + }, + "discount": { + "type": ["number", "null"] + }, + "discount_percentage": { + "type": ["number", "null"] + }, + "discount_type": { + "type": ["string", "null"] + }, + "documents_count": { + "type": ["number", "null"] + }, + "id": { + "type": "number" + }, + "number": { + "type": ["number", "null"] + }, + "part_location_id": { + "type": ["number", "null"] + }, + "part_location_name": { + "type": ["string", "null"] + }, + "shipping": { + "type": ["number", "null"] + }, + "state": { + "type": ["string", "null"] + }, + "subtotal": { + "type": ["number", "null"] + }, + "tax_1": { + "type": ["number", "null"] + }, + "tax_1_percentage": { + "type": ["string", "null"] + }, + "tax_1_type": { + "type": ["string", "null"] + }, + "tax_2": { + "type": ["number", "null"] + }, + "tax_2_percentage": { + "type": ["string", "null"] + }, + "tax_2_type": { + "type": ["string", "null"] + }, + "total_amount": { + "type": ["number", "null"] + }, + "watchers_count": { + "type": ["number", "string", "null"] + }, + "status_name": { + "type": ["string", "null"] + }, + "status_color": { + "type": ["string", "null"] + }, + "submitted_for_approval_at": { + "type": ["string", "null"] + }, + "rejected_at": { + "type": ["string", "null"] + }, + "purchased_at": { + "type": ["string", "null"] + }, + "received_partial_at": { + "type": ["string", "null"] + }, + "received_full_at": { + "type": ["string", "null"] + }, + "closed_at": { + "type": ["string", "null"] + }, + "discount_cents": { + "type": ["number", "string", "null"] + }, + "tax_1_cents": { + "type": ["number", "string", "null"] + }, + "tax_2_cents": { + "type": ["number", "string", "null"] + }, + "subtotal_cents": { + "type": ["number", "string", "null"] + }, + "shipping_cents": { + "type": ["number", "string", "null"] + }, + "total_amount_cents": { + "type": ["number", "string", "null"] + }, + "approved_by_id": { + "type": ["number", "null"] + }, + "created_by_id": { + "type": ["number", "null"] + }, + "closed_by_id": { + "type": ["number", "null"] + }, + "purchased_by_id": { + "type": ["number", "null"] + }, + "rejected_by_id": { + "type": ["number", "null"] + }, + "received_partial_by_id": { + "type": ["number", "null"] + }, + "received_full_by_id": { + "type": ["number", "null"] + }, + "purchase_order_status_id": { + "type": ["number", "null"] + }, + "submitted_for_approval_by_id": { + "type": ["number", "null"] + }, + "updated_at": { + "type": "string" + }, + "vendor_id": { + "type": ["number", "null"] + }, + "vendor_name": { + "type": ["string", "null"] + }, + "vendor": { + "type": ["object", "null"] + }, + "labels": { + "type": ["array", "null"] + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/service_entries.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/service_entries.json new file mode 100644 index 000000000000..8b6b2d9f5d0d --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/service_entries.json @@ -0,0 +1,181 @@ +{ + "type": "object", + "additionalProperties": true, + "properties": { + "attachment_permissions": { + "type": "object" + }, + "auto_integrate_repair_order_status": { + 
"type": ["string", "null"] + }, + "invoice": { + "type": ["object", "null"] + }, + "warranty_credits_cents": { + "type": ["string", "number", "null"] + }, + "warranty_credits_percentage": { + "type": ["string", "null"] + }, + "warranty_credits_type": { + "type": ["string", "null"] + }, + "labor_subtotal_cents": { + "type": ["string", "number", "null"] + }, + "parts_subtotal_cents": { + "type": ["string", "number", "null"] + }, + "subtotal_cents": { + "type": ["string", "number", "null"] + }, + "discout_cents": { + "type": ["string", "number", "null"] + }, + "tax_1_cents": { + "type": ["string", "number", "null"] + }, + "tax_2_cents": { + "type": ["string", "number", "null"] + }, + "total_amount_cents": { + "type": ["string", "number", "null"] + }, + "warrant_credits_cents": { + "type": ["string", "number", "null"] + }, + "comments_count": { + "type": "number" + }, + "completed_at": { + "type": "string" + }, + "created_at": { + "type": "string" + }, + "discount": { + "type": ["null", "number"] + }, + "discount_percentage": { + "type": ["null", "string"] + }, + "fees_cents": { + "type": ["number", "null"] + }, + "discount_type": { + "type": "string" + }, + "discount_cents": { + "type": ["number", "string", "null"] + }, + "documents_count": { + "type": "number" + }, + "general_notes": { + "type": ["null", "string"] + }, + "id": { + "type": "number" + }, + "images_count": { + "type": "number" + }, + "is_sample": { + "type": "boolean" + }, + "labels": { + "items": { + "properties": { + "id": { + "type": "number" + }, + "name": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, + "labor_subtotal": { + "type": ["null", "number"] + }, + "primary_meter_entry": { + "type": ["object", "null"] + }, + "secondary_meter_entry": { + "type": ["object", "null"] + }, + "parts_subtotal": { + "type": ["null", "number"] + }, + "reference": { + "type": ["null", "string"] + }, + "started_at": { + "type": ["null", "string"] + }, + "status": { + "type": "string" + }, + "tax_1": { + "type": ["null", "number"] + }, + "tax_1_percentage": { + "type": ["null", "string"] + }, + "tax_1_type": { + "type": ["null", "string"] + }, + "tax_2": { + "type": ["null", "number"] + }, + "tax_2_percentage": { + "type": ["null", "string"] + }, + "tax_2_type": { + "type": ["null", "string"] + }, + "updated_at": { + "type": "string" + }, + "vehicle_id": { + "type": "number" + }, + "vehicle": { + "type": ["object", "null"] + }, + "vendor_id": { + "type": ["null", "number"] + }, + "vendor_name": { + "type": ["null", "string"] + }, + "vendor": { + "type": ["object", "null"] + }, + "vmrs_repair_priority_class": { + "properties": { + "code": { + "type": "string" + }, + "color": { + "type": "string" + }, + "id": { + "type": "number" + }, + "name": { + "type": "string" + } + }, + "type": ["object", "null"] + }, + "work_order_id": { + "type": ["null", "number"] + }, + "work_order_number": { + "type": ["null", "number"] + } + } +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/submitted_inspection_forms.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/submitted_inspection_forms.json new file mode 100644 index 000000000000..232adae62471 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/submitted_inspection_forms.json @@ -0,0 +1,53 @@ +{ + "type": "object", + "additionalProperties": true, + "properties": { + "date": { + "type": "string" + }, + "failed_items": { + "type": "number" + }, + "id": { + "type": "number" + }, + 
"inspection_form": { + "properties": { + "description": { + "type": ["null", "string"] + }, + "id": { + "type": "number" + }, + "title": { + "type": "string" + } + }, + "type": "object" + }, + "started_at": { + "type": "string" + }, + "starting_latitude": { + "type": ["null", "number"] + }, + "starting_longitude": { + "type": ["null", "number"] + }, + "submitted_at": { + "type": "string" + }, + "submitted_latitude": { + "type": ["null", "number"] + }, + "submitted_longitude": { + "type": ["null", "number"] + }, + "user": { + "type": ["string", "object"] + }, + "vehicle": { + "type": ["object", "null"] + } + } +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/vehicle_assignments.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/vehicle_assignments.json new file mode 100644 index 000000000000..17549f1fb5e0 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/vehicle_assignments.json @@ -0,0 +1,78 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": true, + "properties": { + "attachment_permissions": { + "type": "object" + }, + "contact": { + "type": ["object", "null"] + }, + "vehicle": { + "type": ["object", "null"] + }, + "comments_count": { + "type": ["number", "null"] + }, + "contact_full_name": { + "type": ["string", "null"] + }, + "contact_id": { + "type": ["number", "null"] + }, + "started_at": { + "type": ["string", "null"] + }, + "ended_at": { + "type": ["string", "null"] + }, + "contact_image_url": { + "type": ["string", "null"] + }, + "created_at": { + "type": "string" + }, + "current": { + "type": "boolean" + }, + "custom_fields": { + "properties": { + "checkbox_type": { + "type": ["string", "null"] + }, + "department_using_vehicle": { + "type": ["string", "null"] + }, + "license_expiration_date": { + "type": ["string", "null"] + }, + "operator_id": { + "type": ["string", "null"] + }, + "test_restricted_custom_fields": { + "type": ["string", "null"] + } + }, + "type": "object" + }, + "future": { + "type": "boolean" + }, + "starting_meter_entry_value": { + "type": ["string", "null"] + }, + "ending_meter_entry_value": { + "type": ["string", "null"] + }, + "id": { + "type": ["number", "null"] + }, + "updated_at": { + "type": "string" + }, + "vehicle_id": { + "type": "number" + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/vehicles.json b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/vehicles.json new file mode 100644 index 000000000000..9270ced88aa3 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/schemas/vehicles.json @@ -0,0 +1,184 @@ +{ + "$schema": "http://json-schema.org/schema#", + "additionalProperties": true, + "properties": { + "account_id": { + "type": "number" + }, + "archived_at": { + "type": ["string", "null"] + }, + "fuel_type_id": { + "type": ["number", "null"] + }, + "fuel_type_name": { + "type": ["string", "null"] + }, + "ai_enabled": { + "type": "boolean" + }, + "assetable_type": { + "type": "string" + }, + "color": { + "type": ["null", "string"] + }, + "comments_count": { + "type": "number" + }, + "created_at": { + "type": "string" + }, + "current_location_entry_id": { + "type": ["null", "number"] + }, + "default_image_url_small": { + "type": ["null", "string"] + }, + "external_ids": { + "type": ["object", "null"] + }, + "documents_count": { + "type": "number" + }, + "estimated_replacement_mileage": { + "type": ["null", 
"string"] + }, + "estimated_resale_price_cents": { + "type": ["null", "number"] + }, + "estimated_service_months": { + "type": ["null", "number"] + }, + "fuel_entries_count": { + "type": "number" + }, + "fuel_volume_units": { + "type": "string" + }, + "group_ancestry": { + "type": ["null", "string"] + }, + "group_id": { + "type": ["null", "number"] + }, + "group_name": { + "type": ["null", "string"] + }, + "id": { + "type": "number" + }, + "images_count": { + "type": "number" + }, + "in_service_date": { + "type": ["null", "string"] + }, + "in_service_meter_value": { + "type": ["null", "string"] + }, + "is_sample": { + "type": "boolean" + }, + "issues_count": { + "type": "number" + }, + "labels": { + "type": "array" + }, + "license_plate": { + "type": ["null", "string"] + }, + "make": { + "type": ["null", "string"] + }, + "model": { + "type": ["null", "string"] + }, + "name": { + "type": "string" + }, + "out_of_service_date": { + "type": ["null", "string"] + }, + "out_of_service_meter_value": { + "type": ["null", "string"] + }, + "ownership": { + "type": "string" + }, + "primary_meter_date": { + "type": ["null", "string"] + }, + "primary_meter_unit": { + "type": "string" + }, + "primary_meter_usage_per_day": { + "type": ["null", "string"] + }, + "secondary_meter_unit": { + "type": ["string", "null"] + }, + "secondary_meter_date": { + "type": ["string", "null"] + }, + "primary_meter_value": { + "type": "string" + }, + "registration_expiration_month": { + "type": "number" + }, + "secondary_meter_usage_per_day": { + "type": ["null", "string"] + }, + "secondary_meter_value": { + "type": "string" + }, + "service_entries_count": { + "type": "number" + }, + "service_reminders_count": { + "type": "number" + }, + "system_of_measurement": { + "type": "string" + }, + "trim": { + "type": ["null", "string"] + }, + "registration_state": { + "type": ["string", "null"] + }, + "updated_at": { + "type": "string" + }, + "vehicle_renewal_reminders_count": { + "type": "number" + }, + "vehicle_status_color": { + "type": ["string", "null"] + }, + "vehicle_status_id": { + "type": "number" + }, + "vehicle_status_name": { + "type": "string" + }, + "vehicle_type_id": { + "type": "number" + }, + "vehicle_type_name": { + "type": "string" + }, + "vin": { + "type": ["null", "string"] + }, + "work_orders_count": { + "type": "number" + }, + "year": { + "type": ["null", "number"] + } + }, + "type": "object" +} diff --git a/airbyte-integrations/connectors/source-fleetio/source_fleetio/source.py b/airbyte-integrations/connectors/source-fleetio/source_fleetio/source.py new file mode 100644 index 000000000000..84d9aae91d9b --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/source_fleetio/source.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# + +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource + +""" +This file provides the necessary constructs to interpret a provided declarative YAML configuration file into +source connector. + +WARNING: Do not modify this file. 
+""" + + +# Declarative Source +class SourceFleetio(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/__init__.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/config.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/config.py new file mode 100644 index 000000000000..13c7ba841295 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/config.py @@ -0,0 +1,22 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict + + +class ConfigBuilder: + def __init__(self) -> None: + self._config: Dict[str, Any] = { + "account_token": "Fleetio Account Token", + "api_key": "Fleetio API Token", + } + + def with_account_token(self, account_token: str) -> "ConfigBuilder": + self._config["account_token"] = account_token + return self + + def with_api_key(self, api_key: str) -> "ConfigBuilder": + self._config["api_key"] = api_key + return self + + def build(self) -> Dict[str, Any]: + return self._config diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_contacts.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_contacts.py new file mode 100644 index 000000000000..9ef5757e3dc5 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_contacts.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "contacts" +_TEMPLATE_NAME = "contacts" +_RECORDS_PATH = FieldPath("contacts") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _a_contact() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _contacts_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class ContactsTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v2/contacts", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + 
"Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _contacts_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_expense_entries.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_expense_entries.py new file mode 100644 index 000000000000..0edae1c1368f --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_expense_entries.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. + +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "expense_entries" +_TEMPLATE_NAME = "expense_entries" +_RECORDS_PATH = FieldPath("expense_entries") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _an_expense_entry() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _expense_entries_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class ExpenseEntriesTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v1/expense_entries", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _expense_entries_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_fuel_entries.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_fuel_entries.py new file mode 100644 index 000000000000..adc4ad5eb188 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_fuel_entries.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "fuel_entries" +_TEMPLATE_NAME = "fuel_entries" +_RECORDS_PATH = FieldPath("fuel_entries") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _a_fuel_entry() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _fuel_entries_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class FuelEntriesTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v1/fuel_entries", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _fuel_entries_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_issues.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_issues.py new file mode 100644 index 000000000000..9bcc8be10511 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_issues.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "issues" +_TEMPLATE_NAME = "issues" +_RECORDS_PATH = FieldPath("issues") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _an_issue() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _issues_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class IssuesTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v2/issues", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _issues_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_parts.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_parts.py new file mode 100644 index 000000000000..e0cb92e3d326 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_parts.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "parts" +_TEMPLATE_NAME = "parts" +_RECORDS_PATH = FieldPath("parts") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _a_part() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _parts_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class PartsTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v1/parts", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _parts_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_purchase_orders.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_purchase_orders.py new file mode 100644 index 000000000000..982ee2a43d5e --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_purchase_orders.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "purchase_orders" +_TEMPLATE_NAME = "purchase_orders" +_RECORDS_PATH = FieldPath("purchase_orders") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _a_purchase_order() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _purchase_orders_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class PurchaseOrdersTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v1/purchase_orders", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _purchase_orders_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_service_entries.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_service_entries.py new file mode 100644 index 000000000000..fcd698d67422 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_service_entries.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "service_entries" +_TEMPLATE_NAME = "service_entries" +_RECORDS_PATH = FieldPath("service_entries") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _a_service_entry() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _service_entries_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class ServiceEntriesTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v2/service_entries", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _service_entries_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_submitted_inspection_forms.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_submitted_inspection_forms.py new file mode 100644 index 000000000000..433077ffec3a --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_submitted_inspection_forms.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "submitted_inspection_forms" +_TEMPLATE_NAME = "submitted_inspection_forms" +_RECORDS_PATH = FieldPath("submitted_inspection_forms") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _a_submitted_inspection_form() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _submitted_inspection_forms_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class SubmittedInspectionFormsTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v1/submitted_inspection_forms", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _submitted_inspection_forms_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_vehicle_assigments.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_vehicle_assigments.py new file mode 100644 index 000000000000..7f98316a735f --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_vehicle_assigments.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "vehicle_assignments" +_TEMPLATE_NAME = "vehicle_assignments" +_RECORDS_PATH = FieldPath("vehicle_assignments") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _a_vehicle_assigment() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _vehicle_assignments_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class VehicleAssignmentsTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v1/vehicle_assignments", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _vehicle_assignments_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_vehicles.py b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_vehicles.py new file mode 100644 index 000000000000..692db8d0fff4 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/integration/test_vehicles.py @@ -0,0 +1,82 @@ +# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
+ +from typing import Any, Dict, Optional +from unittest import TestCase + +from airbyte_cdk.test.catalog_builder import CatalogBuilder +from airbyte_cdk.test.entrypoint_wrapper import EntrypointOutput, read +from airbyte_cdk.test.mock_http import HttpMocker, HttpRequest +from airbyte_cdk.test.mock_http.response_builder import ( + FieldPath, + HttpResponseBuilder, + RecordBuilder, + create_record_builder, + create_response_builder, + find_template, +) +from airbyte_protocol.models import SyncMode +from integration.config import ConfigBuilder +from source_fleetio import SourceFleetio + +_AN_ACCOUNT_TOKEN = "example_account_token" # used from our dev docs as an example +_AN_API_KEY = "example_api_key" # used from our dev docs as an example +_STREAM_NAME = "vehicles" +_TEMPLATE_NAME = "vehicles" +_RECORDS_PATH = FieldPath("vehicles") +_API_URL = "https://secure.fleetio.com/api" +_API_VERSION = "2024-03-15" + + +def _a_vehicle() -> RecordBuilder: + return create_record_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _vehicles_response() -> HttpResponseBuilder: + return create_response_builder( + find_template(_TEMPLATE_NAME, __file__), + _RECORDS_PATH, + ) + + +def _read( + config_builder: ConfigBuilder, + state: Optional[Dict[str, Any]] = None, + expecting_exception: bool = False, +) -> EntrypointOutput: + return read( + SourceFleetio(), + config_builder.build(), + CatalogBuilder().with_stream(_STREAM_NAME, SyncMode.full_refresh).build(), + state, + expecting_exception, + ) + + +class VehiclesTest(TestCase): + @HttpMocker() + def test_request_is_created_properly(self, http_mocker: HttpMocker): + http_mocker.get( + HttpRequest( + url=f"{_API_URL}/v1/vehicles", + query_params={ + "per_page": "100", + }, + headers={ + "Authorization": f"Token {_AN_API_KEY}", + "Account-Token": _AN_ACCOUNT_TOKEN, + "X-Client-Name": "data_connector", + "X-Client-Platform": "fleetio_airbyte", + "X-Api-Version": _API_VERSION, + }, + ), + _vehicles_response().build(), + ) + + _read( + ConfigBuilder() + .with_account_token(_AN_ACCOUNT_TOKEN) + .with_api_key(_AN_API_KEY) + ) diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/contacts.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/contacts.json new file mode 100644 index 000000000000..3850802ddf90 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/contacts.json @@ -0,0 +1,59 @@ +{ + "id": 0, + "created_at": "2023-03-14T13:46:27-06:00", + "updated_at": "2023-03-14T13:46:27-06:00", + "archived_at": "2023-03-14T13:46:27-06:00", + "email": "string", + "name": "string", + "first_name": "string", + "middle_name": "string", + "last_name": "string", + "group_id": 0, + "group_name": "string", + "group_hierarchy": "Group 1|Group 2|Group 3", + "technician": true, + "vehicle_operator": true, + "employee": true, + "birth_date": "2023-03-14", + "street_address": "123 Main St", + "street_address_line_2": "Apt 1", + "city": "string", + "region": "string", + "postal_code": "string", + "country": "string", + "employee_number": "string", + "job_title": "string", + "license_class": "string", + "license_number": "string", + "license_state": "string", + "home_phone_number": "string", + "mobile_phone_number": "string", + "work_phone_number": "string", + "other_phone_number": "string", + "start_date": "2024-02-22", + "leave_date": "2024-02-22", + "hourly_labor_rate_cents": 0, + "attachment_permissions": { + "read_photos": true, 
+ "manage_photos": true, + "read_documents": true, + "manage_documents": true + }, + "default_image_url": "string", + "account_membership_id": 0, + "images": [ + { + "id": 0, + "created_at": "2023-03-14T13:46:27-06:00", + "updated_at": "2023-03-14T13:46:27-06:00", + "imageable_id": 0, + "imageable_type": "ExpenseEntry", + "file_name": "string", + "file_mime_type": "image/jpeg", + "file_size": 0, + "file_url": "string", + "full_url": "string" + } + ], + "images_count": 0 +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/expense_entries.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/expense_entries.json new file mode 100644 index 000000000000..2afb893038e9 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/expense_entries.json @@ -0,0 +1,18 @@ +{ + "created_at": "string", + "custom_fields": {}, + "expense_entry_type_id": 0, + "expense_entry_type_name": "string", + "id": 0, + "notes": "string", + "occurred_at": "string", + "total_amount": 0, + "total_amount_cents": 0, + "updated_at": "string", + "vehicle_id": 0, + "vehicle_name": "string", + "vehicle": {}, + "vendor_id": 0, + "vendor_name": "string", + "vendor": {} +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/fuel_entries.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/fuel_entries.json new file mode 100644 index 000000000000..30018acb5093 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/fuel_entries.json @@ -0,0 +1,62 @@ +{ + "attachment_permissions": {}, + "comments_count": 0, + "created_at": "2023-03-14T13:46:27-06:00", + "custom_fields": {}, + "date": "2023-03-14T13:46:27-06:00", + "external_id": "string", + "fuel_type_id": 0, + "fuel_type_name": "string", + "kpl": "string", + "documents_count": 0, + "fuel_economy_units_for_current_user": "string", + "id": 0, + "images_count": 0, + "is_sample": true, + "liters": "string", + "meter_entry": { + "id": 0, + "created_at": "2023-03-14T13:46:27-06:00", + "updated_at": "2023-03-14T13:46:27-06:00", + "auto_voided_at": "2023-03-14T13:46:27-06:00", + "category": "starting", + "meter_type": "secondary", + "meterable_id": 0, + "meterable_type": "FuelEntry", + "value": "string", + "vehicle_id": 0, + "void": false, + "type": "GpsMeterEntry", + "date": "2023-03-14" + }, + "partial": true, + "personal": true, + "price_per_volume_unit": "string", + "reference": "string", + "region": "string", + "reset": true, + "uk_gallons_per_hr": "string", + "us_gallons_per_hr": "string", + "usage_in_hr": "string", + "usage_in_km": "string", + "usage_in_mi": "string", + "liters_per_hr": "string", + "lp100k": "string", + "mpg_uk": "string", + "mpg_us": "string", + "total_amount": "string", + "total_amount_cents": 0, + "type": "string", + "uk_gallons": "string", + "updated_at": "string", + "cost_per_hr": "string", + "cost_per_km": "string", + "cost_per_mi": "string", + "us_gallons": "string", + "vehicle_id": 0, + "vehicle_name": "string", + "vehicle": {}, + "vendor_id": 0, + "vendor": {}, + "watchers_count": 0 +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/issues.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/issues.json new file mode 100644 index 000000000000..e2a8a90e4ecc --- /dev/null +++ 
b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/issues.json @@ -0,0 +1,40 @@ +{ + "asset_type": "string", + "name": "string", + "closed_at": "2023-03-14T13:46:27-06:00", + "closed_by": {}, + "closed_note": "string", + "comments_count": 0, + "created_at": "2023-03-14T13:46:27-06:00", + "creation_type": "string", + "description": "string", + "documents_count": 0, + "due_date": "2023-03-14T13:46:27-06:00", + "due_meter_value": 0, + "due_secondary_meter_value": 0, + "equipment": {}, + "attachment_permissions": {}, + "external_id": 0, + "fault_id": 0, + "fault": 0, + "type": {}, + "fault_rule": {}, + "id": 0, + "images_count": 0, + "number": "string", + "reported_at": "2023-03-14T13:46:27-06:00", + "reported_by": {}, + "due_primary_meter_value": "string", + "overdue": true, + "resolvable_id": 0, + "resolvable": {}, + "resolvable_type": "string", + "resolved_at": "string", + "state": "string", + "submitted_inspection_form_id": 0, + "submitted_inspection_form": {}, + "summary": "string", + "vehicle": {}, + "updated_at": "2023-03-14T13:46:27-06:00", + "watchers_count": 0 +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/parts.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/parts.json new file mode 100644 index 000000000000..df24f2ad94d8 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/parts.json @@ -0,0 +1,30 @@ +{ + "attachment_permissions": {}, + "comments_count": 0, + "created_at": "string", + "custom_fields": {}, + "default_image_url": "string", + "default_image_url_large": "string", + "default_image_url_medium": "string", + "default_image_url_small": "string", + "description": "string", + "documents_count": 0, + "id": 0, + "images_count": 0, + "manufacturer_part_number": "string", + "measurement_unit_id": 0, + "measurement_unit_name": "string", + "number": "string", + "part_category": {}, + "part_category_id": 0, + "part_category_name": "string", + "part_manufacturer": {}, + "part_manufacturer_id": 0, + "part_manufacturer_name": "string", + "unit_cost": 0, + "unit_cost_cents": 0, + "average_unit_cost_cents": 0, + "upc": "string", + "archived_at": "2023-03-14T13:46:27-06:00", + "updated_at": "2023-03-14T13:46:27-06:00" +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/purchase_orders.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/purchase_orders.json new file mode 100644 index 000000000000..0ffd9fd7f6e5 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/purchase_orders.json @@ -0,0 +1,57 @@ +{ + "approved_at": "2023-03-14T13:46:27-06:00", + "approved_by": "string", + "comments_count": 0, + "created_at": "2023-03-14T13:46:27-06:00", + "created_by": "string", + "custom_fields": {}, + "description": "string", + "discount": 0, + "discount_percentage": 0, + "discount_type": "string", + "documents_count": 0, + "id": 0, + "number": 0, + "part_location_id": 0, + "part_location_name": "string", + "shipping": 0, + "state": "string", + "subtotal": 0, + "tax_1": 0, + "tax_1_percentage": "string", + "tax_1_type": "string", + "tax_2": 0, + "tax_2_percentage": "string", + "tax_2_type": "string", + "total_amount": 0, + "watchers_count": 0, + "status_name": "string", + "status_color": "string", + "submitted_for_approval_at": "2023-03-14T13:46:27-06:00", + "rejected_at": "2023-03-14T13:46:27-06:00", + 
"purchased_at": "2023-03-14T13:46:27-06:00", + "received_partial_at": "2023-03-14T13:46:27-06:00", + "received_full_at": "2023-03-14T13:46:27-06:00", + "closed_at": "2023-03-14T13:46:27-06:00", + "discount_cents": 0, + "tax_1_cents": 0, + "tax_2_cents": 0, + "subtotal_cents": 0, + "shipping_cents": 0, + "total_amount_cents": 0, + "approved_by_id": 0, + "created_by_id": 0, + "type": 0, + "closed_by_id": 0, + "purchased_by_id": 0, + "rejected_by_id": 0, + "received_partial_by_id": 0, + "received_full_by_id": 0, + "purchase_order_status_id": 0, + "submitted_for_approval_by_id": 0, + "updated_at": "2023-03-14T13:46:27-06:00", + "vendor_id": 0, + "vendor_name": "string", + "vendor": {}, + "labels": [] +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/service_entries.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/service_entries.json new file mode 100644 index 000000000000..8d002368c784 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/service_entries.json @@ -0,0 +1,52 @@ +{ + "attachment_permissions": {}, + "auto_integrate_repair_order_status": "string", + "invoice": {}, + "warranty_credits_cents": 0, + "warranty_credits_percentage": "string", + "warranty_credits_type": "string", + "labor_subtotal_cents": 0, + "parts_subtotal_cents": 0, + "subtotal_cents": 0, + "discout_cents": 0, + "tax_1_cents": 0, + "tax_2_cents": 0, + "total_amount_cents": 0, + "warrant_credits_cents": 0, + "comments_count": 0, + "completed_at": "2023-03-14T13:46:27-06:00", + "created_at": "2023-03-14T13:46:27-06:00", + "discount": 0, + "discount_percentage": "string", + "fees_cents": 0, + "discount_type": "string", + "discount_cents": 0, + "documents_count": 0, + "general_notes": "string", + "id": 0, + "images_count": 0, + "is_sample": 0, + "labels": [], + "labor_subtotal": 0, + "primary_meter_entry": {}, + "secondary_meter_entry": {}, + "parts_subtotal": 0, + "reference": "string", + "started_at": "2023-03-14T13:46:27-06:00", + "status": "string", + "tax_1": 0, + "tax_1_percentage": "string", + "tax_1_type": "string", + "tax_2": 0, + "tax_2_percentage": "string", + "tax_2_type": "string", + "updated_at": "2023-03-14T13:46:27-06:00", + "vehicle_id": 0, + "vehicle": {}, + "vendor_id": 0, + "vendor_name": "string", + "vendor": {}, + "vmrs_repair_priority_class": {}, + "work_order_id": 0, + "work_order_number": 0 +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/submitted_inspection_forms.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/submitted_inspection_forms.json new file mode 100644 index 000000000000..279fb62d79a7 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/submitted_inspection_forms.json @@ -0,0 +1,14 @@ +{ + "date": "2023-03-14T13:46:27-06:00", + "failed_items": 0, + "id": 0, + "inspection_form": {}, + "started_at": "2023-03-14T13:46:27-06:00", + "starting_latitude": 0, + "starting_longitude": 0, + "submitted_at": "2023-03-14T13:46:27-06:00", + "submitted_latitude": 0, + "submitted_longitude": 0, + "user": {}, + "vehicle": {} +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/vehicle_assignments.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/vehicle_assignments.json new file mode 100644 index 000000000000..83a6208b92d0 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/vehicle_assignments.json @@ -0,0 +1,20 @@ +{ + "attachment_permissions": {}, + "contact": {}, + "vehicle": {}, + "comments_count": 0, + "contact_full_name": "string", + "contact_id": 0, + "started_at": "2023-03-14T13:46:27-06:00", + "ended_at": "2023-03-14T13:46:27-06:00", + "contact_image_url": "string", + "created_at": "2023-03-14T13:46:27-06:00", + "current": true, + "custom_fields": {}, + "future": true, + "starting_meter_entry_value": "string", + "ending_meter_entry_value": "string", + "id": 0, + "updated_at": "2023-03-14T13:46:27-06:00", + "vehicle_id": 0 +} diff --git a/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/vehicles.json b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/vehicles.json new file mode 100644 index 000000000000..7fdcab529ff8 --- /dev/null +++ b/airbyte-integrations/connectors/source-fleetio/unit_tests/resource/http/response/vehicles.json @@ -0,0 +1,61 @@ +{ + "account_id": 0, + "archived_at": "2023-03-14T13:46:27-06:00", + "fuel_type_id": 0, + "fuel_type_name": "string", + "ai_enabled": true, + "assetable_type": "string", + "color": "string", + "comments_count": 0, + "created_at": "2023-03-14T13:46:27-06:00", + "current_location_entry_id": 0, + "default_image_url_small": "string", + "external_ids": {}, + "documents_count": 0, + "estimated_replacement_mileage": "string", + "estimated_resale_price_cents": 0, + "estimated_service_months": 0, + "fuel_entries_count": 0, + "fuel_volume_units": "string", + "group_ancestry": "string", + "group_id": 0, + "group_name": "string", + "id": 0, + "images_count": 0, + "in_service_date": "string", + "in_service_meter_value": "string", + "is_sample": true, + "issues_count": 0, + "labels": [], + "license_plate": "string", + "make": "string", + "model": "string", + "name": "string", + "out_of_service_date": "2023-03-14T13:46:27-06:00", + "out_of_service_meter_value": "string", + "ownership": "string", + "primary_meter_date": "2023-03-14T13:46:27-06:00", + "primary_meter_unit": "string", + "primary_meter_usage_per_day": "string", + "secondary_meter_unit": "string", + "secondary_meter_date": "2023-03-14T13:46:27-06:00", + "primary_meter_value": "string", + "registration_expiration_month": "string", + "secondary_meter_usage_per_day": "string", + "secondary_meter_value": "string", + "service_entries_count": 0, + "service_reminders_count": 0, + "system_of_measurement": "string", + "trim": "string", + "registration_state": "string", + "updated_at": "2023-03-14T13:46:27-06:00", + "vehicle_renewal_reminders_count": 0, + "vehicle_status_color": "string", + "vehicle_status_id": 0, + "vehicle_status_name": "string", + "vehicle_type_id": 0, + "vehicle_type_name": "string", + "vin": "string", + "work_orders_count": 0, + "year": 0 +} diff --git a/airbyte-integrations/connectors/source-shopify/acceptance-test-config.yml b/airbyte-integrations/connectors/source-shopify/acceptance-test-config.yml index 5473d1c9d319..b2cd36067ef6 100644 --- a/airbyte-integrations/connectors/source-shopify/acceptance-test-config.yml +++ b/airbyte-integrations/connectors/source-shopify/acceptance-test-config.yml @@ -59,6 +59,10 @@ acceptance_tests: future_state: future_state_path: "integration_tests/abnormal_state.json" timeout_seconds: 8400 + # some of the streams are not passing the `test_read_sequential_slices` + # due to the inability to deal with `frozen` data in sandbox + # TODO: remove this, once the CAT's 
`test_read_sequential_slices` is fixed. + skip_comprehensive_incremental_tests: true full_refresh: tests: - config_path: "secrets/config.json" diff --git a/docs/connector-development/connector-metadata-file.md b/docs/connector-development/connector-metadata-file.md index fecc76273fe7..72c96521e911 100644 --- a/docs/connector-development/connector-metadata-file.md +++ b/docs/connector-development/connector-metadata-file.md @@ -177,7 +177,7 @@ If not specified, all remote registry configurations are disabled by default. The `connectorTestSuitesOptions` contains a list of test suite options for a connector. The list of declared test suites affects which suite will run in CI. -We currently accept three value for the `suite` field: +We currently accept three values for the `suite` field: * `unitTests` * `integrationTests` * `acceptanceTests` @@ -193,6 +193,51 @@ The `testSecrets` object has three properties: **If you are a community contributor please note that addition of a new secret to our secret store requires manual intervention from an Airbyter. Please reach out to your PR reviewers if you want to add a test secret to our CI.** #### The `secretStore` object -This object has three properties: +This object has two properties: * `type`: Defines the secret store type, only `GSM` (Google Secret Manager) is currently supported * `alias`: The alias of this secret store in our system, which is resolved into an actual secret store address by our CI. We currently have a single alias to store our connector test secrets: `airbyte-connector-testing-secret-store` . + +#### How to enable a test suite +We currently support three test suite types: +* `unitTests` +* `integrationTests` +* `acceptanceTests` + + +To enable a test suite, add the suite name to the `connectorTestSuitesOptions` list: + +```yaml + connectorTestSuitesOptions: + - suite: unitTests + # This will enable acceptanceTests for this connector. + # It declares that this test suite requires a secret named SECRET_SOURCE-FAKER_CREDS + # in our secret store, and that the secret should be stored in the connector's secrets folder in a file named config.json. + - suite: acceptanceTests + testSecrets: + - name: SECRET_SOURCE-FAKER_CREDS + fileName: config.json + secretStore: + type: GSM + alias: airbyte-connector-testing-secret-store +``` + +##### Default paths and conventions + +The [`airbyte-ci`](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md) tool automatically locates each test suite based on established conventions and stores secret files (when needed) in the established secrets directory, which should already be excluded from git to prevent accidental commits. + +**Python connectors** +Tests are discovered by Pytest and are expected to be located in: +* the `unit_tests` directory for the `unitTests` suite +* the `integration_tests` directory for the `integrationTests` suite + +**Java connectors** +No specific directory is required; which tests run is determined by the connector's Gradle configuration. +`airbyte-ci` runs the `test` Gradle task for the `unitTests` suite and the `integrationTest` Gradle task for the `integrationTests` suite. + +**Acceptance tests** + +Acceptance tests are language agnostic and are configured via the `acceptance-test-config.yml` file in the connector's root directory. More on that [here](https://docs.airbyte.com/connector-development/testing-connectors/connector-acceptance-tests-reference).
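+
+As a quick illustration, a minimal `acceptance-test-config.yml` could look like the sketch below. The connector name and file paths are placeholders rather than a real connector, and the full set of available options is covered in the reference linked above:
+
+```yaml
+# Hypothetical connector image; replace with your connector's dev image.
+connector_image: airbyte/source-example:dev
+acceptance_tests:
+  spec:
+    tests:
+      - spec_path: "source_example/spec.yaml"
+  full_refresh:
+    tests:
+      - config_path: "secrets/config.json"
+        configured_catalog_path: "integration_tests/configured_catalog.json"
+```
+
+Declaring `acceptanceTests` in `connectorTestSuitesOptions` controls whether the suite runs in CI, while the file above controls which acceptance tests run and with what inputs.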
+ +**Default secret paths** +Secrets listed in `testSecrets` with a `fileName` will be mounted into the connector's `secrets` directory; the `fileName` is relative to that directory. +For example, `fileName: config.json` will be mounted to `/secrets/config.json`. \ No newline at end of file diff --git a/docs/integrations/destinations/duckdb-migrations.md b/docs/integrations/destinations/duckdb-migrations.md index 53ae60158669..66cd9e2a14ca 100644 --- a/docs/integrations/destinations/duckdb-migrations.md +++ b/docs/integrations/destinations/duckdb-migrations.md @@ -1,5 +1,11 @@ # DuckDB Migration Guide +## Upgrading to 0.4.0 + +This version updates the DuckDB libraries from `v0.9.2` to `v0.10.3`. Note that DuckDB `0.10.x` is not backwards compatible with databases created in prior versions of DuckDB. You should upgrade your database file before upgrading this connector, and you should consider the impact on any other tooling you are using to connect to your database. Please see the [DuckDB 0.10.0 announcement](https://duckdb.org/2024/02/13/announcing-duckdb-0100.html) for more information and for upgrade instructions. + +MotherDuck users will need to log into the MotherDuck UI at https://app.motherduck.com/, navigate to settings, and then opt in to the database conversion. + ## Upgrading to 0.3.0 This version updates the DuckDB libraries from `v0.8.1` to `v0.9.1`. Note that DuckDB `0.9.x` is not backwards compatible with prior versions of DuckDB. Please see the [DuckDB 0.9.0 release notes](https://github.com/duckdb/duckdb/releases/tag/v0.9.0) for more information and for upgrade instructions. diff --git a/docs/integrations/destinations/duckdb.md b/docs/integrations/destinations/duckdb.md index 73bb1d08f2b8..d31faeb14272 100644 --- a/docs/integrations/destinations/duckdb.md +++ b/docs/integrations/destinations/duckdb.md @@ -102,13 +102,20 @@ Note: If you are running Airbyte on Windows with Docker backed by WSL2, you have +## Troubleshooting + +### Error message `Request failed: (UNAVAILABLE, RPC 'GET_WELCOME_PACK')` + +This error may indicate that you are connecting with a `0.10.x` DuckDB client (as per DuckDB Destination connector versions `>=0.4.0`) and your database has not yet been upgraded to a version `>=0.10.x`. To resolve this, you'll need to manually upgrade your database or revert to a previous version of the DuckDB Destination connector. + + ## Changelog | Version | Date | Pull Request | Subject | | :------ | :--------- | :-------------------------------------------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| 0.3.6 | 2024-05-21 | [38486](https://github.com/airbytehq/airbyte/pull/38486) | [autopull] base image + poetry + up_to_date | +| 0.4.0 | 2024-05-30 | [#37515](https://github.com/airbytehq/airbyte/pull/37515) | Upgrade DuckDB engine version to [`v0.10.3`](https://github.com/duckdb/duckdb/releases/tag/v0.10.3). | +| 0.3.6 | 2024-05-21 | [#38486](https://github.com/airbytehq/airbyte/pull/38486) | [autopull] base image + poetry + up_to_date | | 0.3.5 | 2024-04-23 | [#37515](https://github.com/airbytehq/airbyte/pull/37515) | Add resource requirements declaration to `metatadat.yml`. 
| -| :------ | :--------- | :------------------------------------------------------- | :--------------------- | | 0.3.4 | 2024-04-16 | [#36715](https://github.com/airbytehq/airbyte/pull/36715) | Improve ingestion performance using pyarrow inmem view for writing to DuckDB. | | 0.3.3 | 2024-04-07 | [#36884](https://github.com/airbytehq/airbyte/pull/36884) | Fix stale dependency versions in lock file, add CLI for internal testing. | | 0.3.2 | 2024-03-20 | [#32635](https://github.com/airbytehq/airbyte/pull/32635) | Instrument custom_user_agent to identify Airbyte-Motherduck connector usage. | diff --git a/docs/integrations/destinations/oracle.md b/docs/integrations/destinations/oracle.md index 14d388c0a090..ab151cce5fed 100644 --- a/docs/integrations/destinations/oracle.md +++ b/docs/integrations/destinations/oracle.md @@ -32,8 +32,7 @@ The Oracle connector is currently in Alpha on Airbyte Cloud. Only TLS encrypted To use the Oracle destination, you'll need: -- An Oracle server version 18 or above -- It's possible to use Oracle 12+ but you need to configure the table name length to 120 chars. +- An Oracle server version 21 or above #### Network Access diff --git a/docs/integrations/destinations/snowflake.md b/docs/integrations/destinations/snowflake.md index 149b474ef5b2..4cc3edc42f89 100644 --- a/docs/integrations/destinations/snowflake.md +++ b/docs/integrations/destinations/snowflake.md @@ -276,8 +276,9 @@ desired namespace. | Version | Date | Pull Request | Subject | |:----------------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 3.8.4 | 2024-05-23 | [\#38632](https://github.com/airbytehq/airbyte/pull/38632) | convert all tests to kotlin | -| 3.8.3 | 2024-05-23 | [\#38586](https://github.com/airbytehq/airbyte/pull/38586) | Bump CDK version | +| 3.9.0 | 2024-05-23 | [\#38658](https://github.com/airbytehq/airbyte/pull/38658) | Adapting to newer interfaces from #38107 | +| 3.8.4 | 2024-05-23 | [\#38632](https://github.com/airbytehq/airbyte/pull/38632) | convert all tests to kotlin | +| 3.8.3 | 2024-05-23 | [\#38586](https://github.com/airbytehq/airbyte/pull/38586) | Bump CDK version | | 3.8.2 | 2024-05-22 | [\#38553](https://github.com/airbytehq/airbyte/pull/38553) | Remove `SwitchingDestination` and `AbstractJdbcDestination` dependency in destination | | 3.8.1 | 2024-05-22 | [\#38568](https://github.com/airbytehq/airbyte/pull/38568) | Adopt latest CDK | | 3.8.0 | 2024-05-08 | [\#37715](https://github.com/airbytehq/airbyte/pull/37715) | Remove option for incremental typing and deduping | diff --git a/docs/integrations/sources/bamboo-hr.md b/docs/integrations/sources/bamboo-hr.md index 07702749d103..3085a09d2396 100644 --- a/docs/integrations/sources/bamboo-hr.md +++ b/docs/integrations/sources/bamboo-hr.md @@ -51,7 +51,7 @@ This page contains the setup guide and reference information for the [BambooHR]( The BambooHR source connector supports the following [sync modes](https://docs.airbyte.com/cloud/core-concepts#connection-sync-modes): | Feature | Supported? 
| -| :------------------------ | :--------- | +|:--------------------------|:-----------| | Full Refresh Sync | Yes | | Incremental - Append Sync | No | | SSL connection | Yes | @@ -85,7 +85,8 @@ Please [create an issue](https://github.com/airbytehq/airbyte/issues) if you see ## Changelog | Version | Date | Pull Request | Subject | -| :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------------------ | +|:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------| +| 0.3.0 | 2024-05-25 | [37452](https://github.com/airbytehq/airbyte/pull/37452) | Migrate to Low Code | | 0.2.6 | 2024-04-19 | [37124](https://github.com/airbytehq/airbyte/pull/37124) | Updating to 0.80.0 CDK | | 0.2.5 | 2024-04-18 | [37124](https://github.com/airbytehq/airbyte/pull/37124) | Manage dependencies with Poetry. | | 0.2.4 | 2024-04-15 | [37124](https://github.com/airbytehq/airbyte/pull/37124) | Base image migration: remove Dockerfile and use the python-connector-base image | diff --git a/docs/integrations/sources/facebook-marketing-migrations.md b/docs/integrations/sources/facebook-marketing-migrations.md index 8463cb663630..d6d7f6232385 100644 --- a/docs/integrations/sources/facebook-marketing-migrations.md +++ b/docs/integrations/sources/facebook-marketing-migrations.md @@ -1,5 +1,27 @@ # Facebook Marketing Migration Guide +## Upgrading to 3.1.0 + +The `AdsInsights` reports can no longer fetch the following root-level properties (fields): +- cost_per_conversion_lead +- conversion_lead_rate + +### Refresh affected AdsInsights reports and clear data + +1. Select **Connections** in the main navbar. + 1. Select the connection(s) affected by the update. +2. Select the **Schema** tab. + 1. Select **Refresh source schema**. + 2. Select **OK**. + 3. Select **Save changes** at the bottom of the page. + :::note + Any detected schema changes will be listed for your review. + ::: + +3. Navigate to a connection's **Settings** tab and click **Clear data** to clear all streams. This action will clear data for all streams in the connection. To clear data for a single stream, navigate to the **Status** tab, click the **three grey dots** next to the affected stream, and select **Clear data**. Do this for all affected streams in the connection. + +For more information on clearing your data in Airbyte, see [this page](/operator-guides/clear). 
+ ## Upgrading to 3.0.0 Custom Insights Reports now have updated schema for following breakdowns: diff --git a/docs/integrations/sources/facebook-marketing.md b/docs/integrations/sources/facebook-marketing.md index e6eed1d1c84c..87a0c67641b3 100644 --- a/docs/integrations/sources/facebook-marketing.md +++ b/docs/integrations/sources/facebook-marketing.md @@ -202,6 +202,7 @@ The Facebook Marketing connector uses the `lookback_window` parameter to repeate | Version | Date | Pull Request | Subject | |:--------|:-----------|:---------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 3.1.0 | 2024-06-01 | [38845](https://github.com/airbytehq/airbyte/pull/38845) | Update AdsInsights fields - removed `cost_per_conversion_lead` and `conversion_lead_rate` | | 3.0.0 | 2024-04-30 | [36608](https://github.com/airbytehq/airbyte/pull/36608) | Update `body_asset, call_to_action_asset, description_asset, image_asset, link_url_asset, title_asset, video_asset` breakdowns schema. | | 2.1.9 | 2024-05-17 | [38301](https://github.com/airbytehq/airbyte/pull/38301) | Fix data inaccuracies when `wish_bid` is requested | | 2.1.8 | 2024-05-07 | [37771](https://github.com/airbytehq/airbyte/pull/37771) | Handle errors without API error codes/messages | diff --git a/docs/integrations/sources/fleetio.md b/docs/integrations/sources/fleetio.md new file mode 100644 index 000000000000..ff8c4de3a382 --- /dev/null +++ b/docs/integrations/sources/fleetio.md @@ -0,0 +1,54 @@ +# Fleetio API + +The Fleetio API documentation can be found [here](https://developer.fleetio.com). + +## Sync Overview + +This connector works with the Fleetio API. The connector currently supports Full Table Refreshes only. + +### Output schema + +The output schemas are: + +- [contacts](https://developer.fleetio.com/docs/api/v-2-contacts-index) +- [expense_entries](https://developer.fleetio.com/docs/api/v-1-expense-entries-index) +- [fuel_entries](https://developer.fleetio.com/docs/api/v-1-fuel-entries-index) +- [issues](https://developer.fleetio.com/docs/api/v-2-issues-index) +- [parts](https://developer.fleetio.com/docs/api/v-1-parts-index) +- [purchase_orders](https://developer.fleetio.com/docs/api/v-1-purchase-orders-index) +- [service_entries](https://developer.fleetio.com/docs/api/v-2-service-entries-index) +- [submitted_inspection_forms](https://developer.fleetio.com/docs/api/v-1-submitted-inspection-forms-index) +- [vehicle_assignments](https://developer.fleetio.com/docs/api/v-1-vehicle-assignments-index) +- [vehicles](https://developer.fleetio.com/docs/api/v-1-vehicles-index) + +### Features + +| Feature | Supported? | |:------------------|:-----------| +| Full Refresh Sync | Yes | +| Incremental Sync | No | + +### Performance considerations + +Our source connector relies on the standard rate limiting provided by the Airbyte low-code CDK. More information on Fleetio API rate limiting can be found [here](https://developer.fleetio.com/docs/overview/rate-limiting). + +## Getting started + +### Requirements + +- An active Fleetio account +- A Fleetio `api_key` and `account_token` + +### Setup guide + +1. Generate your Fleetio API credentials, as described [here](https://developer.fleetio.com/docs/overview/quick-start). +2. 
In the left navigation bar, click **Sources**. In the top-right corner, click **New Source**. +3. Set the name for your source. +4. Authenticate using the credentials generated in step 1. +5. Click **Set up source**. + +## Changelog + +| Version | Date | Pull Request | Subject | +|:--------|:-----------|:---------------------------------------------------------|:------------------------------------------------| +| 0.1.0 | 2022-11-02 | [35633](https://github.com/airbytehq/airbyte/pull/35633) | 🎉 New Source: Fleetio source | \ No newline at end of file diff --git a/docs/integrations/sources/low-code.md b/docs/integrations/sources/low-code.md index 6369a10f9203..6a0b1807896b 100644 --- a/docs/integrations/sources/low-code.md +++ b/docs/integrations/sources/low-code.md @@ -9,6 +9,8 @@ The changelog below is automatically updated by the `bump_version` command as pa | Version | Date | Pull Request | Subject | | :------ | :--------- | :------------------------------------------------------- | :------------------------------------------------------------------- | +| 1.2.1 | 2024-06-03 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 1.2.1 | +| 1.2.0 | 2024-06-03 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 1.2.0 | | 1.1.1 | 2024-05-30 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 1.1.3 | | 1.1.0 | 2024-05-22 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 1.1.0 | | 1.0.0 | 2024-05-20 | [36501](https://github.com/airbytehq/airbyte/pull/36501) | Bump CDK version to 1.0.0 | diff --git a/docs/operator-guides/refreshes.md b/docs/operator-guides/refreshes.md index a68a8aeb703b..28e9830bde9d 100644 --- a/docs/operator-guides/refreshes.md +++ b/docs/operator-guides/refreshes.md @@ -85,7 +85,7 @@ With the advent of Refresh and Retain History Syncs, Airbyte has provided a way ### Example: Understanding and Recovering from a Flaky Source -Consider the following example. You are extracting data into your data warehouse and notice that data for April, 2024 is missing. You are using an append sync mode. +Consider the following example. You are extracting data into your data warehouse and notice that data for March, 2024 is missing. You are using an append sync mode. | year_month (pk) | total_sales | \_airbyte_extracted_at | \_airbyte_generation_id | \_airbyte_meta | \_airbyte_raw_id | | --------------- | ----------- | ---------------------- | ----------------------- | ------------------------------ | ---------------- | diff --git a/pyproject.toml b/pyproject.toml index 68683e9fa983..8b1bab693174 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ extend-exclude = """ | airbyte-cdk/python/airbyte_cdk/sources/declarative/models | invalid | non_formatted_code + | airbyte-integrations/connectors/destination-duckdb | airbyte-integrations/connectors/destination-snowflake-cortex )/ """ @@ -90,7 +91,8 @@ skip_glob = [ # TODO: Remove this after we move to Ruff. Ruff is mono-repo-aware and # correctly handles first-party imports in subdirectories. - # Migrated to `ruff`: + # Migrated to Ruff: + "airbyte-integrations/connectors/destination-duckdb/**", "airbyte-integrations/connectors/destination-snowflake-cortex/**" ]