diff --git a/.github/workflows/publish-command.yml b/.github/workflows/publish-command.yml index 5182801bb62f..482531bea02e 100644 --- a/.github/workflows/publish-command.yml +++ b/.github/workflows/publish-command.yml @@ -54,6 +54,7 @@ jobs: ADWORDS_INTEGRATION_TEST_CREDS: ${{ secrets.ADWORDS_INTEGRATION_TEST_CREDS }} AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} AWS_REDSHIFT_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_REDSHIFT_INTEGRATION_TEST_CREDS }} + SOURCE_AWS_CLOUDTRAIL_CREDS: ${{ secrets.SOURCE_AWS_CLOUDTRAIL_CREDS }} AZURE_STORAGE_INTEGRATION_TEST_CREDS: ${{ secrets.AZURE_STORAGE_INTEGRATION_TEST_CREDS }} BIGQUERY_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDS }} BRAINTREE_TEST_CREDS: ${{ secrets.BRAINTREE_TEST_CREDS }} diff --git a/.github/workflows/test-command.yml b/.github/workflows/test-command.yml index afcff0c278be..8f8fe677dbac 100644 --- a/.github/workflows/test-command.yml +++ b/.github/workflows/test-command.yml @@ -53,6 +53,7 @@ jobs: AMPLITUDE_INTEGRATION_TEST_CREDS: ${{ secrets.AMPLITUDE_INTEGRATION_TEST_CREDS }} ADWORDS_INTEGRATION_TEST_CREDS: ${{ secrets.ADWORDS_INTEGRATION_TEST_CREDS }} AWS_S3_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_S3_INTEGRATION_TEST_CREDS }} + SOURCE_AWS_CLOUDTRAIL_CREDS: ${{ secrets.SOURCE_AWS_CLOUDTRAIL_CREDS }} AWS_REDSHIFT_INTEGRATION_TEST_CREDS: ${{ secrets.AWS_REDSHIFT_INTEGRATION_TEST_CREDS }} AZURE_STORAGE_INTEGRATION_TEST_CREDS: ${{ secrets.AZURE_STORAGE_INTEGRATION_TEST_CREDS }} BIGQUERY_INTEGRATION_TEST_CREDS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDS }} diff --git a/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/6ff047c0-f5d5-4ce5-8c81-204a830fa7e1.json b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/6ff047c0-f5d5-4ce5-8c81-204a830fa7e1.json new file mode 100644 index 000000000000..f7d0b4cbd2c1 --- /dev/null +++ b/airbyte-config/init/src/main/resources/config/STANDARD_SOURCE_DEFINITION/6ff047c0-f5d5-4ce5-8c81-204a830fa7e1.json @@ -0,0 +1,7 @@ +{ + "sourceDefinitionId": "6ff047c0-f5d5-4ce5-8c81-204a830fa7e1", + "name": "AWS CloudTrail", + "dockerRepository": "airbyte/source-aws-cloudtrail", + "dockerImageTag": "0.1.0", + "documentationUrl": "https://docs.airbyte.io/integrations/sources/aws-cloudtrail" +} diff --git a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml index bf05972b885c..879d67b5ab5f 100644 --- a/airbyte-config/init/src/main/resources/seed/source_definitions.yaml +++ b/airbyte-config/init/src/main/resources/seed/source_definitions.yaml @@ -334,3 +334,8 @@ dockerRepository: airbyte/source-db2 dockerImageTag: 0.1.0 documentationUrl: https://docs.airbyte.io/integrations/sources/db2 +- sourceDefinitionId: 6ff047c0-f5d5-4ce5-8c81-204a830fa7e1 + name: AWS CloudTrail + dockerRepository: airbyte/source-aws-cloudtrail + dockerImageTag: 0.1.0 + documentationUrl: https://docs.airbyte.io/integrations/sources/aws-cloudtrail diff --git a/airbyte-integrations/builds.md b/airbyte-integrations/builds.md index b5430c1fecce..55e8b372f022 100644 --- a/airbyte-integrations/builds.md +++ b/airbyte-integrations/builds.md @@ -11,6 +11,8 @@ Asana [![source-asana](https://img.shields.io/endpoint?url=https%3A%2F%2Fstatus-api.airbyte.io%2Ftests%2Fsummary%2Fsource-asana%2Fbadge.json)](https://status-api.airbyte.io/tests/summary/source-asana) + AWS CloudTrail 
[![source-aws-cloudtrail](https://img.shields.io/endpoint?url=https%3A%2F%2Fstatus-api.airbyte.io%2Ftests%2Fsummary%2Fsource-aws-cloudtrail%2Fbadge.json)](https://status-api.airbyte.io/tests/summary/source-aws-cloudtrail) + Braintree [![source-braintree-singer](https://img.shields.io/endpoint?url=https%3A%2F%2Fstatus-api.airbyte.io%2Ftests%2Fsummary%2Fsource-braintree-singer%2Fbadge.json)](https://status-api.airbyte.io/tests/summary/source-braintree-singer) Drift [![source-drift](https://img.shields.io/endpoint?url=https%3A%2F%2Fstatus-api.airbyte.io%2Ftests%2Fsummary%2Fsource-drift%2Fbadge.json)](https://status-api.airbyte.io/tests/summary/source-drift) diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/.dockerignore b/airbyte-integrations/connectors/source-aws-cloudtrail/.dockerignore new file mode 100644 index 000000000000..1cd1a8685ba3 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/.dockerignore @@ -0,0 +1,7 @@ +* +!Dockerfile +!Dockerfile.test +!main.py +!source_aws_cloudtrail +!setup.py +!secrets diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/CHANGELOG.md b/airbyte-integrations/connectors/source-aws-cloudtrail/CHANGELOG.md new file mode 100644 index 000000000000..f64c33f7bfe4 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/CHANGELOG.md @@ -0,0 +1,7 @@ +# Changelog + +## 0.1.0 +Initial Release. + +Added Management Events incremental stream: https://docs.aws.amazon.com/awscloudtrail/latest/APIReference/API_LookupEvents.html + diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/Dockerfile b/airbyte-integrations/connectors/source-aws-cloudtrail/Dockerfile new file mode 100644 index 000000000000..32b29678c21c --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/Dockerfile @@ -0,0 +1,15 @@ +FROM python:3.7-slim + +# Bash is installed for more convenient debugging. +RUN apt-get update && apt-get install -y bash && rm -rf /var/lib/apt/lists/* + +WORKDIR /airbyte/integration_code +COPY source_aws_cloudtrail ./source_aws_cloudtrail +COPY main.py ./ +COPY setup.py ./ +RUN pip install . + +ENTRYPOINT ["python", "/airbyte/integration_code/main.py"] + +LABEL io.airbyte.version=0.1.0 +LABEL io.airbyte.name=airbyte/source-aws-cloudtrail diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/README.md b/airbyte-integrations/connectors/source-aws-cloudtrail/README.md new file mode 100644 index 000000000000..9966b386fd74 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/README.md @@ -0,0 +1,131 @@ +# Aws Cloudtrail Source + +This is the repository for the Aws Cloudtrail source connector, written in Python. +For information about how to use this connector within Airbyte, see [the documentation](https://docs.airbyte.io/integrations/sources/aws-cloudtrail). + +## Local development + +### Prerequisites +**To iterate on this connector, make sure to complete this prerequisites section.** + +#### Minimum Python version required `= 3.7.0` + +#### Build & Activate Virtual Environment and install dependencies +From this connector directory, create a virtual environment: +``` +python -m venv .venv +``` + +This will generate a virtualenv for this module in `.venv/`. Make sure this venv is active in your +development environment of choice. To activate it from the terminal, run: +``` +source .venv/bin/activate +pip install -r requirements.txt +``` +If you are in an IDE, follow your IDE's instructions to activate the virtualenv. 
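+
+With the virtualenv active, a quick sanity check is to load the connector and print its spec. This is a hypothetical helper, not part of this PR; it assumes the editable install succeeded and that the CDK's `spec()` helper behaves as in other Python connectors (loading `source_aws_cloudtrail/spec.json`):
+```
+# sanity_check.py (hypothetical helper, run from this connector directory)
+from airbyte_cdk import AirbyteLogger
+from source_aws_cloudtrail import SourceAwsCloudtrail
+
+source = SourceAwsCloudtrail()
+# spec() is inherited from the CDK base class and reads the packaged spec.json
+print(source.spec(AirbyteLogger()))
+```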
+
+Note that while we are installing dependencies from `requirements.txt`, you should only edit `setup.py` for your dependencies. `requirements.txt` is
+used for editable installs (`pip install -e`) to pull in Python dependencies from the monorepo and will call `setup.py`.
+If this is mumbo jumbo to you, don't worry about it: just put your deps in `setup.py`, install using `pip install -r requirements.txt`, and everything
+should work as you expect.
+
+#### Building via Gradle
+You can also build the connector with Gradle. This is typically used in CI and is not needed for your development workflow.
+
+To build using Gradle, from the Airbyte repository root, run:
+```
+./gradlew :airbyte-integrations:connectors:source-aws-cloudtrail:build
+```
+
+#### Create credentials
+**If you are a community contributor**, follow the instructions in the [documentation](https://docs.airbyte.io/integrations/sources/aws-cloudtrail)
+to generate the necessary credentials. Then create a file `secrets/config.json` conforming to the `source_aws_cloudtrail/spec.json` file.
+Note that any directory named `secrets` is gitignored across the entire Airbyte repo, so there is no danger of accidentally checking in sensitive information.
+See `integration_tests/sample_config.json` for a sample config file.
+
+**If you are an Airbyte core member**, copy the credentials from Lastpass under the secret name `source aws-cloudtrail test creds`
+and place them into `secrets/config.json`.
+
+### Locally running the connector
+```
+python main.py spec
+python main.py check --config secrets/config.json
+python main.py discover --config secrets/config.json
+python main.py read --config secrets/config.json --catalog integration_tests/configured_catalog.json
+```
+
+### Locally running the connector docker image
+
+#### Build
+First, make sure you build the latest Docker image:
+```
+docker build . -t airbyte/source-aws-cloudtrail:dev
+```
+
+You can also build the connector image via Gradle:
+```
+./gradlew :airbyte-integrations:connectors:source-aws-cloudtrail:airbyteDocker
+```
+When building via Gradle, the docker image name and tag, respectively, are the values of the `io.airbyte.name` and `io.airbyte.version` `LABEL`s in
+the Dockerfile.
+
+#### Run
+Then run any of the connector commands as follows:
+```
+docker run --rm airbyte/source-aws-cloudtrail:dev spec
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-aws-cloudtrail:dev check --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets airbyte/source-aws-cloudtrail:dev discover --config /secrets/config.json
+docker run --rm -v $(pwd)/secrets:/secrets -v $(pwd)/integration_tests:/integration_tests airbyte/source-aws-cloudtrail:dev read --config /secrets/config.json --catalog /integration_tests/configured_catalog.json
+```
+## Testing
+Make sure to familiarize yourself with [pytest test discovery](https://docs.pytest.org/en/latest/goodpractices.html#test-discovery) to know how your test files and methods should be named.
+First install test dependencies into your virtual environment:
+```
+pip install .[tests]
+```
+### Unit Tests
+To run unit tests locally, from the connector directory run:
+```
+python -m pytest unit_tests
+```
+
+### Integration Tests
+There are two types of integration tests: Acceptance Tests (Airbyte's test suite for all source connectors) and custom integration tests (which are specific to this connector).
+#### Custom Integration tests +Place custom tests inside `integration_tests/` folder, then, from the connector root, run +``` +python -m pytest integration_tests +``` +#### Acceptance Tests +Customize `acceptance-test-config.yml` file to configure tests. See [Source Acceptance Tests](source-acceptance-tests.md) for more information. +If your connector requires to create or destroy resources for use during acceptance tests create fixtures for it and place them inside integration_tests/acceptance.py. +To run your integration tests with acceptance tests, from the connector root, run +``` +python -m pytest integration_tests -p integration_tests.acceptance +``` +To run your integration tests with docker + +### Using gradle to run tests +All commands should be run from airbyte project root. +To run unit tests: +``` +./gradlew :airbyte-integrations:connectors:source-aws-cloudtrail:unitTest +``` +To run acceptance and custom integration tests: +``` +./gradlew :airbyte-integrations:connectors:source-aws-cloudtrail:integrationTest +``` + +## Dependency Management +All of your dependencies should go in `setup.py`, NOT `requirements.txt`. The requirements file is only used to connect internal Airbyte dependencies in the monorepo for local development. +We split dependencies between two groups, dependencies that are: +* required for your connector to work need to go to `MAIN_REQUIREMENTS` list. +* required for the testing need to go to `TEST_REQUIREMENTS` list + +### Publishing a new version of the connector +You've checked out the repo, implemented a million dollar feature, and you're ready to share your changes with the world. Now what? +1. Make sure your changes are passing unit and integration tests. +1. Bump the connector version in `Dockerfile` -- just increment the value of the `LABEL io.airbyte.version` appropriately (we use [SemVer](https://semver.org/)). +1. Create a Pull Request. +1. Pat yourself on the back for being an awesome contributor. +1. Someone from Airbyte will take a look at your PR and iterate with you to merge it into master. 
diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/acceptance-test-config.yml b/airbyte-integrations/connectors/source-aws-cloudtrail/acceptance-test-config.yml new file mode 100644 index 000000000000..7914f6d8ca50 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/acceptance-test-config.yml @@ -0,0 +1,26 @@ +# See [Source Acceptance Tests](https://docs.airbyte.io/contributing-to-airbyte/building-new-connector/source-acceptance-tests.md) +# for more information about how to configure these tests +connector_image: airbyte/source-aws-cloudtrail:dev +tests: + spec: + - spec_path: "source_aws_cloudtrail/spec.json" + connection: + - config_path: "secrets/config.json" + status: "succeed" + - config_path: "integration_tests/invalid_config.json" + status: "failed" + discovery: + - config_path: "secrets/config.json" + incremental: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + #future_state_path: "integration_tests/abnormal_state.json" + cursor_paths: + management_events: ["EventTime"] + basic_read: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" + validate_output_from_all_streams: yes + full_refresh: + - config_path: "secrets/config.json" + configured_catalog_path: "integration_tests/configured_catalog.json" diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/acceptance-test-docker.sh b/airbyte-integrations/connectors/source-aws-cloudtrail/acceptance-test-docker.sh new file mode 100644 index 000000000000..1425ff74f151 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/acceptance-test-docker.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env sh +docker run --rm -it \ + -v /var/run/docker.sock:/var/run/docker.sock \ + -v /tmp:/tmp \ + -v $(pwd):/test_input \ + airbyte/source-acceptance-test \ + --acceptance-test-config /test_input diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/build.gradle b/airbyte-integrations/connectors/source-aws-cloudtrail/build.gradle new file mode 100644 index 000000000000..375580485fd0 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/build.gradle @@ -0,0 +1,9 @@ +plugins { + id 'airbyte-python' + id 'airbyte-docker' + id 'airbyte-source-acceptance-test' +} + +airbytePython { + moduleDirectory 'source_aws_cloudtrail' +} diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/__init__.py b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/abnormal_state.json new file mode 100644 index 000000000000..bf380aaad20e --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/abnormal_state.json @@ -0,0 +1,5 @@ +{ + "management_events": { + "EventTime": 9999999999 + } +} diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/acceptance.py b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/acceptance.py new file mode 100644 index 000000000000..eeb4a2d3e02e --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/acceptance.py @@ -0,0 +1,36 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to 
any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# + + +import pytest + +pytest_plugins = ("source_acceptance_test.plugin",) + + +@pytest.fixture(scope="session", autouse=True) +def connector_setup(): + """ This fixture is a placeholder for external resources that acceptance test might require.""" + # TODO: setup test dependencies if needed. otherwise remove the TODO comments + yield + # TODO: clean up test dependencies diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/configured_catalog.json b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/configured_catalog.json new file mode 100644 index 000000000000..dace4d7a43b0 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/configured_catalog.json @@ -0,0 +1,59 @@ +{ + "streams": [ + { + "stream": { + "name": "management_events", + "supported_sync_modes": ["incremental", "full_refresh"], + "source_defined_cursor": true, + "default_cursor_field": ["EventTime"], + "json_schema": { + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "AccessKeyId": { + "type": "string" + }, + "CloudTrailEvent": { + "type": "string" + }, + "EventId": { + "type": "string" + }, + "EventName": { + "type": "string" + }, + "EventSource": { + "type": "string" + }, + "EventTime": { + "type": "integer" + }, + "ReadOnly": { + "type": "string" + }, + "Resources": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "ResourceName": { + "type": "string" + }, + "ResourceType": { + "type": "string" + } + } + } + }, + "Username": { + "type": "string" + } + }, + "type": "object" + } + }, + "sync_mode": "incremental", + "destination_sync_mode": "append", + "cursor_field": ["EventTime"] + } + ] +} diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/invalid_config.json b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/invalid_config.json new file mode 100644 index 000000000000..6ea349b90f87 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/invalid_config.json @@ -0,0 +1,7 @@ +{ + "aws_key_id": "123", + "aws_secret_key": "123", + "aws_region_name": "eu-west-1", + "start_date": "2020-06-09", + "records_limit": 250 +} diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/state.json b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/state.json new file mode 100644 index 000000000000..9132009f5e57 --- /dev/null +++ 
b/airbyte-integrations/connectors/source-aws-cloudtrail/integration_tests/state.json @@ -0,0 +1,5 @@ +{ + "management_events": { + "EventTime": 1623346856 + } +} diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/main.py b/airbyte-integrations/connectors/source-aws-cloudtrail/main.py new file mode 100644 index 000000000000..7496270cdc33 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/main.py @@ -0,0 +1,33 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# + + +import sys + +from airbyte_cdk.entrypoint import launch +from source_aws_cloudtrail import SourceAwsCloudtrail + +if __name__ == "__main__": + source = SourceAwsCloudtrail() + launch(source, sys.argv[1:]) diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/requirements.txt b/airbyte-integrations/connectors/source-aws-cloudtrail/requirements.txt new file mode 100644 index 000000000000..0411042aa091 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/requirements.txt @@ -0,0 +1,2 @@ +-e ../../bases/source-acceptance-test +-e . diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/setup.py b/airbyte-integrations/connectors/source-aws-cloudtrail/setup.py new file mode 100644 index 000000000000..9cf02e6a8422 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/setup.py @@ -0,0 +1,46 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# + + +from setuptools import find_packages, setup + +MAIN_REQUIREMENTS = ["airbyte-cdk", "boto3==1.17.*"] + +TEST_REQUIREMENTS = [ + "pytest~=6.1", + "source-acceptance-test", +] + +setup( + name="source_aws_cloudtrail", + description="Source implementation for Aws Cloudtrail.", + author="Airbyte", + author_email="contact@airbyte.io", + packages=find_packages(), + install_requires=MAIN_REQUIREMENTS, + package_data={"": ["*.json", "schemas/*.json", "schemas/shared/*.json"]}, + extras_require={ + "tests": TEST_REQUIREMENTS, + }, +) diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/__init__.py b/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/__init__.py new file mode 100644 index 000000000000..25ae341e6a54 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/__init__.py @@ -0,0 +1,27 @@ +""" +MIT License + +Copyright (c) 2020 Airbyte + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+""" + +from .source import SourceAwsCloudtrail + +__all__ = ["SourceAwsCloudtrail"] diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/schemas/management_events.json b/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/schemas/management_events.json new file mode 100644 index 000000000000..385400272fae --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/schemas/management_events.json @@ -0,0 +1,44 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "AccessKeyId": { + "type": ["null", "string"] + }, + "CloudTrailEvent": { + "type": ["null", "string"] + }, + "EventId": { + "type": ["null", "string"] + }, + "EventName": { + "type": ["null", "string"] + }, + "EventSource": { + "type": ["null", "string"] + }, + "EventTime": { + "type": ["null", "integer"] + }, + "ReadOnly": { + "type": ["null", "string"] + }, + "Resources": { + "type": ["null", "array"], + "items": { + "type": ["null", "object"], + "properties": { + "ResourceName": { + "type": ["null", "string"] + }, + "ResourceType": { + "type": ["null", "string"] + } + } + } + }, + "Username": { + "type": ["null", "string"] + } + } +} diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/source.py b/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/source.py new file mode 100644 index 000000000000..03ba00fed0b2 --- /dev/null +++ b/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/source.py @@ -0,0 +1,220 @@ +# +# MIT License +# +# Copyright (c) 2020 Airbyte +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+#
+
+
+import math
+from abc import ABC, abstractmethod
+from datetime import datetime
+from typing import Any, Iterable, List, Mapping, MutableMapping, Optional, Tuple
+
+import boto3
+import botocore
+import botocore.exceptions
+import pendulum
+from airbyte_cdk import AirbyteLogger
+from airbyte_cdk.models import SyncMode
+from airbyte_cdk.sources import AbstractSource
+from airbyte_cdk.sources.streams import Stream
+from botocore.config import Config
+
+
+class Client:
+    def __init__(self, aws_key_id: str, aws_secret_key: str, aws_region_name: str):
+        config = Config(
+            parameter_validation=False,
+            retries=dict(
+                # use a similar configuration as in the HTTP source
+                max_attempts=5,
+                # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html#adaptive-retry-mode
+                mode="adaptive",
+            ),
+        )
+
+        self.session: botocore.client.CloudTrail = boto3.client(
+            "cloudtrail", aws_access_key_id=aws_key_id, aws_secret_access_key=aws_secret_key, region_name=aws_region_name, config=config
+        )
+
+
+class AwsCloudtrailStream(Stream, ABC):
+    limit: int = 50
+
+    start_date_format = "YYYY-MM-DD"
+
+    def __init__(self, aws_key_id: str, aws_secret_key: str, aws_region_name: str, start_date: str, **kwargs):
+        self.aws_secret_key = aws_secret_key
+        self.aws_key_id = aws_key_id
+        self.start_date = pendulum.from_format(start_date, self.start_date_format).int_timestamp
+        self.client = Client(aws_key_id, aws_secret_key, aws_region_name)
+        # records_limit is an option that caps the number of records read by the connector;
+        # use it for testing and development purposes only
+        self.records_left = kwargs.get("records_limit", math.inf)
+
+    def next_page_token(self, response: Mapping[str, Any]) -> Optional[Mapping[str, Any]]:
+        return response.get("NextToken")
+
+    def request_params(
+        self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None
+    ) -> MutableMapping[str, Any]:
+        params = {"MaxResults": self.limit}
+
+        if self.start_date:
+            params["StartTime"] = self.start_date
+        if next_page_token:
+            params["NextToken"] = next_page_token
+
+        return params
+
+    def datetime_to_timestamp(self, date: datetime) -> int:
+        return int(datetime.timestamp(date))
+
+    @abstractmethod
+    def send_request(self, **kwargs) -> Mapping[str, Any]:
+        """
+        This method should be overridden by subclasses to send a proper request with appropriate parameters to CloudTrail
+        """
+        pass
+
+    def is_read_limit_reached(self) -> bool:
+        if self.records_left <= 0:
+            # the limit on fetched records has been reached
+            return True
+        return False
+
+    def read_records(
+        self,
+        sync_mode: SyncMode,
+        cursor_field: List[str] = None,
+        stream_slice: Mapping[str, Any] = None,
+        stream_state: Mapping[str, Any] = None,
+    ) -> Iterable[Mapping[str, Any]]:
+        stream_state = stream_state or {}
+        pagination_complete = False
+        next_page_token = None
+
+        if self.is_read_limit_reached():
+            return iter(())
+
+        while not pagination_complete:
+            params = self.request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token)
+            response = self.send_request(**params)
+
+            next_page_token = self.next_page_token(response)
+            if not next_page_token:
+                pagination_complete = True
+
+            for record in self.parse_response(response):
+                yield record
+                self.records_left -= 1
+
+                if self.is_read_limit_reached():
+                    return iter(())
+
+        yield from []
+
+
+class IncrementalAwsCloudtrailStream(AwsCloudtrailStream, ABC):
+
+    # the API does not support reading in ascending order,
+    # so save state only once, after the full read
+    state_checkpoint_interval = None
+
+    def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]) -> Mapping[str, Any]:
+        record_time = latest_record[self.time_field]
+        return {self.cursor_field: max(record_time, current_stream_state.get(self.cursor_field, 0))}
+
+
+class ManagementEvents(IncrementalAwsCloudtrailStream):
+    primary_key = "EventId"
+
+    time_field = "EventTime"
+
+    cursor_field = "EventTime"
+
+    data_field = "Events"
+
+    data_lifetime = 90 * (24 * 60 * 60)  # in seconds (90 days)
+
+    def send_request(self, **kwargs) -> Mapping[str, Any]:
+        return self.client.session.lookup_events(**kwargs)
+
+    def request_params(
+        self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, any] = None, next_page_token: Mapping[str, Any] = None
+    ) -> MutableMapping[str, Any]:
+        params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token)
+
+        if stream_slice:
+            # override the time range using the slice
+            if stream_slice.get("StartTime"):
+                params["StartTime"] = stream_slice["StartTime"]
+            if stream_slice.get("EndTime"):
+                params["EndTime"] = stream_slice["EndTime"]
+
+        return params
+
+    def parse_response(self, response: dict, **kwargs) -> Iterable[Mapping]:
+        for event in response[self.data_field]:
+            # boto3 converts timestamps to datetime objects;
+            # we convert them back to timestamps to preserve the original API type
+            event[self.time_field] = self.datetime_to_timestamp(event[self.time_field])
+            yield event
+
+    def stream_slices(
+        self, sync_mode: SyncMode, cursor_field: List[str] = None, stream_state: Mapping[str, Any] = None
+    ) -> Iterable[Optional[Mapping[str, Any]]]:
+        """
+        Slices the whole time range into more granular (24h) slices. The latest time slice should be the first to avoid data loss
+        """
+        cursor_data = stream_state.get(self.cursor_field) if stream_state else 0
+        end_time = pendulum.now()
+        # the API stores data for the last 90 days only; adjust the start time to avoid unnecessary API requests
+        # state is ignored if the start_date option is later than the cursor
+        start_time = max(end_time.int_timestamp - self.data_lifetime, self.start_date, cursor_data)
+        last_start_time = pendulum.from_timestamp(start_time)
+
+        slices = []
+        while last_start_time < end_time:
+            slices.append(
+                {
+                    "StartTime": last_start_time.int_timestamp,
+                    # decrement by one second, as the API includes records matching the specified StartTime and EndTime
+                    "EndTime": last_start_time.add(days=1).int_timestamp - 1,
+                }
+            )
+            last_start_time = last_start_time.add(days=1)
+
+        return slices
+
+
+class SourceAwsCloudtrail(AbstractSource):
+    def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, any]:
+        client = Client(config["aws_key_id"], config["aws_secret_key"], config["aws_region_name"])
+        try:
+            client.session.lookup_events(MaxResults=1)
+        except botocore.exceptions.ClientError as error:
+            return False, error
+
+        return True, None
+
+    def streams(self, config: Mapping[str, Any]) -> List[Stream]:
+        return [ManagementEvents(**config)]
diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/spec.json b/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/spec.json
new file mode 100644
index 000000000000..8aea96ee1d22
--- /dev/null
+++ b/airbyte-integrations/connectors/source-aws-cloudtrail/source_aws_cloudtrail/spec.json
@@ -0,0 +1,38 @@
+{
+  "documentationUrl": "https://docs.airbyte.io/integrations/sources/aws-cloudtrail",
+  "connectionSpecification": {
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "title": "Aws CloudTrail Spec",
+    "type": "object",
+    "required": [
+      "aws_key_id",
+      "aws_secret_key",
+      "aws_region_name",
+      "start_date"
+    ],
+    "additionalProperties": false,
+    "properties": {
+      "aws_key_id": {
+        "type": "string",
+        "description": "Specifies an AWS access key associated with an IAM user or role.",
+        "airbyte_secret": true
+      },
+      "aws_secret_key": {
+        "type": "string",
+        "description": "Specifies the secret key associated with the access key. This is essentially the 'password' for the access key.",
+        "airbyte_secret": true
+      },
+      "aws_region_name": {
+        "type": "string",
+        "description": "The default AWS Region to use, for example, us-west-1 or us-west-2. When specifying a Region inline during client initialization, this property is named region_name."
+      },
+      "start_date": {
+        "type": "string",
+        "description": "The date from which you'd like to replicate data. Data in CloudTrail is available for the last 90 days only. Format: YYYY-MM-DD.",
+        "examples": ["2021-01-01"],
+        "default": "1970-01-01",
+        "pattern": "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+      }
+    }
+  }
+}
diff --git a/airbyte-integrations/connectors/source-aws-cloudtrail/unit_tests/test_event_stream_slices.py b/airbyte-integrations/connectors/source-aws-cloudtrail/unit_tests/test_event_stream_slices.py
new file mode 100644
index 000000000000..924041b43127
--- /dev/null
+++ b/airbyte-integrations/connectors/source-aws-cloudtrail/unit_tests/test_event_stream_slices.py
@@ -0,0 +1,106 @@
+#
+# MIT License
+#
+# Copyright (c) 2020 Airbyte
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+#
+
+from itertools import islice
+
+import pendulum
+from airbyte_cdk.models import SyncMode
+from source_aws_cloudtrail.source import ManagementEvents
+
+config = {
+    "aws_key_id": "1",
+    "aws_secret_key": "1",
+    "aws_region_name": "us-west-1",
+    "start_date": "2020-05-01",
+}
+
+
+def test_full_refresh_slice():
+    current_time = pendulum.now().int_timestamp
+    stream = ManagementEvents(**config)
+    slices = stream.stream_slices(sync_mode=SyncMode.full_refresh, cursor_field=stream.cursor_field)
+
+    # checks that the start time is not more than 90 days before now
+    assert slices[0]["StartTime"] >= current_time - ManagementEvents.data_lifetime
+    # checks that the end time is not less than now
+    assert slices[-1]["EndTime"] >= current_time
+
+
+def test_incremental_slice():
+    current_time = pendulum.now().int_timestamp
+    stream = ManagementEvents(**config)
+    stream_state = {"EventTime": pendulum.today().subtract(days=15).int_timestamp}
+
+    slices = stream.stream_slices(
+        sync_mode=SyncMode.incremental,
+        cursor_field=stream.cursor_field,
+        stream_state=stream_state,
+    )
+
+    # checks that the start time equals the time in stream_state
+    assert slices[0]["StartTime"] == stream_state["EventTime"]
+    # checks that the end time is not less than now
+    assert slices[-1]["EndTime"] >= current_time
+
+
+def test_incremental_slice_state_less_than_start_date():
+    current_time = pendulum.now().int_timestamp
+    stream = ManagementEvents(**config)
+    stream_state = {"EventTime": 1}
+
+    slices = stream.stream_slices(
+        sync_mode=SyncMode.incremental,
+        cursor_field=stream.cursor_field,
+        stream_state=stream_state,
+    )
+
+    # checks that the start time does not equal the time in stream_state
+    assert slices[0]["StartTime"] != stream_state["EventTime"]
+    # checks that the start time is not more than 90 days before now
+    assert slices[0]["StartTime"] >= current_time - ManagementEvents.data_lifetime
+
+
+def test_full_refresh_slice_start_date_greater_than_now():
+    config_with_big_start_date = config.copy()
+    config_with_big_start_date["start_date"] = pendulum.now().add(days=1).format(ManagementEvents.start_date_format)
+
+    stream = ManagementEvents(**config_with_big_start_date)
+    slices = stream.stream_slices(sync_mode=SyncMode.full_refresh, cursor_field=stream.cursor_field)
+
+    # checks that there are no slices
+    assert not slices
+
+
+def test_slices_not_intersect():
+    stream = ManagementEvents(**config)
+    slices = stream.stream_slices(sync_mode=SyncMode.full_refresh, cursor_field=stream.cursor_field)
+
+    # verify that StartTime and EndTime are not equal
+    # and that the next StartTime = EndTime + 1
+    for slice, next_slice in zip(slices, islice(slices, 1, None)):
+        if next_slice is None:
+            break
+
+        assert slice["EndTime"] + 1 == next_slice["StartTime"]
+        assert slice["EndTime"] > slice["StartTime"]
diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md
index e1addedba538..a39903f0e930 100644
--- a/docs/SUMMARY.md
+++ b/docs/SUMMARY.md
@@ -34,6 +34,7 @@
   * [Amplitude](integrations/sources/amplitude.md)
   * [Appstore](integrations/sources/appstore.md)
   * [Asana](integrations/sources/asana.md)
+  * [AWS CloudTrail](integrations/sources/aws-cloudtrail.md)
   * [Braintree](integrations/sources/braintree.md)
   * [ClickHouse](integrations/sources/clickhouse.md)
   * [Db2](integrations/sources/db2.md)
@@ -166,7 +167,7 @@
   * [Changelog](project-overview/changelog/README.md)
     * [Platform](project-overview/changelog/platform.md)
     * [Connectors](project-overview/changelog/connectors.md)
-  * [Code of Conduct](project-overview/code-of-conduct.md)
+  * [Code of Conduct](project-overview/code-of-conduct.md)
   * [License](project-overview/license.md)
   * [Careers & Open Positions](career-and-open-positions/README.md)
   * [Senior Software Engineer](career-and-open-positions/senior-software-engineer.md)
diff --git a/docs/integrations/sources/aws-cloudtrail.md b/docs/integrations/sources/aws-cloudtrail.md
new file mode 100644
index 000000000000..0c1e5df2a281
--- /dev/null
+++ b/docs/integrations/sources/aws-cloudtrail.md
@@ -0,0 +1,56 @@
+# AWS CloudTrail
+
+## Overview
+
+The AWS CloudTrail source supports both Full Refresh and Incremental syncs. You can choose whether this connector copies only new or updated data, or all rows in the tables and columns you set up for replication, every time a sync runs.
+
+This Source Connector is based on the [Boto3 CloudTrail](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudtrail.html) client.
+
+### Output schema
+
+This Source is capable of syncing the following core Streams:
+
+* [Management Events](https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/cloudtrail.html#CloudTrail.Client.lookup_events)
+
+Insight events are not supported right now. Only Management events are available.
+
+### Data type mapping
+
+| Integration Type | Airbyte Type | Notes |
+| :--- | :--- | :--- |
+| `string` | `string` |  |
+| `number` | `integer` |  |
+| `array` | `array` |  |
+| `object` | `object` |  |
+
+### Features
+
+| Feature | Supported?\(Yes/No\) | Notes |
+| :--- | :--- | :--- |
+| Full Refresh Sync | Yes |  |
+| Incremental Sync | Yes |  |
+| Namespaces | No |  |
+
+### Performance considerations
+
+The rate of lookup requests for the `events` stream is limited to two per second, per account, per region.
+This connector gracefully retries when it encounters a throttling error. However, if the errors persist after multiple retries (for example, if you set up many instances of this connector using the same account and region), the connector sync will fail.
+
+## Getting started
+
+### Requirements
+
+* AWS Access key ID
+* AWS Secret access key
+* AWS region name
+
+### Setup guide
+
+Please follow these [steps](https://docs.aws.amazon.com/powershell/latest/userguide/pstools-appendix-sign-up.html) to get your AWS access key and secret.
+
+
+## Changelog
+
+| Version | Date | Pull Request | Subject |
+| :------ | :-------- | :----- | :------ |
+| 0.1.0 | 2021-06-23 | [4122](https://github.com/airbytehq/airbyte/pull/4122) | Initial release supporting the LookupEvent API |
diff --git a/tools/bin/ci_credentials.sh b/tools/bin/ci_credentials.sh
index c747b1c1b5f5..75df02db3bbe 100755
--- a/tools/bin/ci_credentials.sh
+++ b/tools/bin/ci_credentials.sh
@@ -91,3 +91,4 @@
 write_standard_creds source-zoom-singer "$ZOOM_INTEGRATION_TEST_CREDS"
 write_standard_creds source-plaid "$PLAID_INTEGRATION_TEST_CREDS"
 write_standard_creds source-file "$AZURE_STORAGE_INTEGRATION_TEST_CREDS" "azblob.json"
 write_standard_creds source-snowflake "$SNOWFLAKE_INTEGRATION_TEST_CREDS" "config.json"
+write_standard_creds source-aws-cloudtrail "$SOURCE_AWS_CLOUDTRAIL_CREDS"
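
For reference, the 24-hour slicing strategy implemented in `ManagementEvents.stream_slices` above can be reproduced in isolation. This is a standalone sketch assuming only `pendulum`; the real implementation additionally clamps the window start using the 90-day retention limit, the `start_date` option, and any saved cursor:
```
import pendulum

# walk forward in day-long windows, mirroring stream_slices
cursor = pendulum.now().subtract(days=3)
end = pendulum.now()

slices = []
while cursor < end:
    slices.append(
        {
            "StartTime": cursor.int_timestamp,
            # EndTime is one second before the next StartTime, so windows never overlap
            "EndTime": cursor.add(days=1).int_timestamp - 1,
        }
    )
    cursor = cursor.add(days=1)

print(len(slices))  # typically 4: three full days plus the in-progress day
```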