From b2bdfb0f1287e3cd221e9f1e8afd2931282e44bd Mon Sep 17 00:00:00 2001 From: "Roman Yermilov [GL]" <86300758+roman-yermilov-gl@users.noreply.github.com> Date: Mon, 13 May 2024 14:53:48 +0200 Subject: [PATCH] Source Jira: migration to low-code (#37374) Co-authored-by: Anatolii Yatsuk Co-authored-by: Serhii Lazebnyi <53845333+lazebnyi@users.noreply.github.com> --- .../integration_tests/abnormal_state.json | 249 ++- .../integration_tests/sample_state.json | 282 +++- .../connectors/source-jira/metadata.yaml | 10 +- .../connectors/source-jira/poetry.lock | 137 +- .../connectors/source-jira/pyproject.toml | 2 +- .../source_jira/components/__init__.py | 0 .../source_jira/components/extractors.py | 21 + .../components/partition_routers.py | 104 ++ .../source_jira/components/transformations.py | 25 + .../source_jira/config_migrations.py | 102 -- .../source-jira/source_jira/manifest.yaml | 1333 +++++++++++++++++ .../source-jira/source_jira/source.py | 222 +-- .../source-jira/source_jira/streams.py | 1231 ++------------- .../source-jira/unit_tests/__init__.py | 3 - .../source-jira/unit_tests/conftest.py | 161 +- .../unit_tests/responses/issue_fields.json | 14 + .../unit_tests/responses/issues.json | 3 +- .../source-jira/unit_tests/test_components.py | 121 ++ .../unit_tests/test_date_time_transformer.py | 11 +- .../test_migrations/test_config.json | 10 - .../test_migrations/test_config_migrations.py | 68 - .../source-jira/unit_tests/test_pagination.py | 15 +- .../source-jira/unit_tests/test_source.py | 4 +- .../source-jira/unit_tests/test_streams.py | 738 +++++---- docs/integrations/sources/jira-migrations.md | 26 + docs/integrations/sources/jira.md | 3 +- 26 files changed, 2782 insertions(+), 2113 deletions(-) create mode 100644 airbyte-integrations/connectors/source-jira/source_jira/components/__init__.py create mode 100644 airbyte-integrations/connectors/source-jira/source_jira/components/extractors.py create mode 100644 airbyte-integrations/connectors/source-jira/source_jira/components/partition_routers.py create mode 100644 airbyte-integrations/connectors/source-jira/source_jira/components/transformations.py delete mode 100644 airbyte-integrations/connectors/source-jira/source_jira/config_migrations.py create mode 100644 airbyte-integrations/connectors/source-jira/source_jira/manifest.yaml delete mode 100644 airbyte-integrations/connectors/source-jira/unit_tests/__init__.py create mode 100644 airbyte-integrations/connectors/source-jira/unit_tests/test_components.py delete mode 100644 airbyte-integrations/connectors/source-jira/unit_tests/test_migrations/test_config.json delete mode 100644 airbyte-integrations/connectors/source-jira/unit_tests/test_migrations/test_config_migrations.py diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/abnormal_state.json b/airbyte-integrations/connectors/source-jira/integration_tests/abnormal_state.json index 727d1fa0207e..9dc1716efb6b 100644 --- a/airbyte-integrations/connectors/source-jira/integration_tests/abnormal_state.json +++ b/airbyte-integrations/connectors/source-jira/integration_tests/abnormal_state.json @@ -3,63 +3,276 @@ "type": "STREAM", "stream": { "stream_descriptor": { - "name": "board_issues" + "name": "board_issues", + "namespace": null }, "stream_state": { - "1": { - "updated": "2122-01-01T00:00:00Z" - }, - "17": { - "updated": "2122-01-01T00:00:00Z" - }, - "58": { - "updated": "2122-01-01T00:00:00Z" - } + "states": [ + { + "partition": { + "board_id": 1, + "parent_slice": {} + }, + "cursor": { + "updated": 
"2222-10-12T13:43:50.735-0700" + } + }, + { + "partition": { + "board_id": 17, + "parent_slice": {} + }, + "cursor": { + "updated": "2222-07-05T12:57:51.258-0700" + } + }, + { + "partition": { + "board_id": 58, + "parent_slice": {} + }, + "cursor": { + "updated": "2222-05-08T03:04:45.139-0700" + } + } + ] } + }, + "sourceStats": { + "recordCount": 1.0 } }, { "type": "STREAM", "stream": { "stream_descriptor": { - "name": "issues" + "name": "issues", + "namespace": null }, "stream_state": { - "updated": "2122-01-01T00:00:00Z" + "states": [ + { + "partition": { + "parent_slice": {}, + "project_id": "10000" + }, + "cursor": { + "updated": "2222-10-12T13:43:50.735-0700" + } + }, + { + "partition": { + "parent_slice": {}, + "project_id": "10016" + }, + "cursor": { + "updated": "2222-07-05T12:57:51.258-0700" + } + }, + { + "partition": { + "parent_slice": {}, + "project_id": "10064" + }, + "cursor": { + "updated": "2222-05-08T03:04:45.139-0700" + } + } + ] } + }, + "sourceStats": { + "recordCount": 1.0 } }, { "type": "STREAM", "stream": { "stream_descriptor": { - "name": "issue_comments" + "name": "issue_worklogs", + "namespace": null }, "stream_state": { - "updated": "2122-01-01T00:00:00Z" + "updated": "2222-05-08T03:04:45.056-0700" } + }, + "sourceStats": { + "recordCount": 21.0 } }, { "type": "STREAM", "stream": { "stream_descriptor": { - "name": "issue_worklogs" + "name": "issue_comments", + "namespace": null }, "stream_state": { - "updated": "2122-01-01T00:00:00Z" + "updated": "2222-12-08T06:32:22.567-0800" } + }, + "sourceStats": { + "recordCount": 2.0 } }, { "type": "STREAM", "stream": { "stream_descriptor": { - "name": "sprint_issues" + "name": "sprint_issues", + "namespace": null }, "stream_state": { - "updated": "2122-01-01T00:00:00Z" + "states": [ + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 2 + }, + "cursor": { + "updated": "2222-10-12T13:30:02.307-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 3 + }, + "cursor": { + "updated": "2222-04-05T04:57:26.258-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 4 + }, + "cursor": { + "updated": "2222-04-05T04:57:48.978-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 5 + }, + "cursor": { + "updated": "2222-04-05T04:57:56.925-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 6 + }, + "cursor": { + "updated": "2222-04-05T04:58:03.147-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 7 + }, + "cursor": { + "updated": "2222-04-05T04:58:10.710-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 8 + }, + "cursor": { + "updated": "2222-04-05T04:58:18.778-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 9 + }, + "cursor": { + "updated": "2222-10-12T13:43:50.735-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 10 + }, + "cursor": { + "updated": "2222-04-05T04:58:35.329-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 11 + }, + "cursor": { + "updated": "2222-04-05T04:58:53.600-0700" + } + }, + { + "partition": { + "parent_slice": { 
+ "board_id": 17, + "parent_slice": {} + }, + "sprint_id": 2 + }, + "cursor": { + "updated": "2222-10-12T13:30:02.307-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 58, + "parent_slice": {} + }, + "sprint_id": 12 + }, + "cursor": { + "updated": "2222-05-08T03:04:45.139-0700" + } + } + ] } + }, + "sourceStats": { + "recordCount": 1.0 } } ] diff --git a/airbyte-integrations/connectors/source-jira/integration_tests/sample_state.json b/airbyte-integrations/connectors/source-jira/integration_tests/sample_state.json index 34de4187323c..7de4731a49d6 100644 --- a/airbyte-integrations/connectors/source-jira/integration_tests/sample_state.json +++ b/airbyte-integrations/connectors/source-jira/integration_tests/sample_state.json @@ -1,10 +1,278 @@ -{ - "issues": { - "updated": "2021-06-02T22:18:39.882Z" +[ + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "board_issues", + "namespace": null + }, + "stream_state": { + "states": [ + { + "partition": { + "board_id": 1, + "parent_slice": {} + }, + "cursor": { + "updated": "2023-10-12T13:43:50.735-0700" + } + }, + { + "partition": { + "board_id": 17, + "parent_slice": {} + }, + "cursor": { + "updated": "2023-07-05T12:57:51.258-0700" + } + }, + { + "partition": { + "board_id": 58, + "parent_slice": {} + }, + "cursor": { + "updated": "2023-05-08T03:04:45.139-0700" + } + } + ] + } + }, + "sourceStats": { + "recordCount": 1.0 + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "issues", + "namespace": null + }, + "stream_state": { + "states": [ + { + "partition": { + "parent_slice": {}, + "project_id": "10000" + }, + "cursor": { + "updated": "2023-10-12T13:43:50.735-0700" + } + }, + { + "partition": { + "parent_slice": {}, + "project_id": "10016" + }, + "cursor": { + "updated": "2023-07-05T12:57:51.258-0700" + } + }, + { + "partition": { + "parent_slice": {}, + "project_id": "10064" + }, + "cursor": { + "updated": "2023-05-08T03:04:45.139-0700" + } + } + ] + } + }, + "sourceStats": { + "recordCount": 1.0 + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "issue_worklogs", + "namespace": null + }, + "stream_state": { + "updated": "2023-05-08T03:04:45.056-0700" + } + }, + "sourceStats": { + "recordCount": 21.0 + } + }, + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "issue_comments", + "namespace": null + }, + "stream_state": { + "updated": "2022-12-08T06:32:22.567-0800" + } + }, + "sourceStats": { + "recordCount": 2.0 + } }, - "issue_worklogs": { - "10622": { - "started": "2021-02-14T11:30:22.313Z" + { + "type": "STREAM", + "stream": { + "stream_descriptor": { + "name": "sprint_issues", + "namespace": null + }, + "stream_state": { + "states": [ + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 2 + }, + "cursor": { + "updated": "2023-10-12T13:30:02.307-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 3 + }, + "cursor": { + "updated": "2023-04-05T04:57:26.258-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 4 + }, + "cursor": { + "updated": "2023-04-05T04:57:48.978-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 5 + }, + "cursor": { + "updated": "2023-04-05T04:57:56.925-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 6 + }, + 
"cursor": { + "updated": "2023-04-05T04:58:03.147-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 7 + }, + "cursor": { + "updated": "2023-04-05T04:58:10.710-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 8 + }, + "cursor": { + "updated": "2023-04-05T04:58:18.778-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 9 + }, + "cursor": { + "updated": "2023-10-12T13:43:50.735-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 10 + }, + "cursor": { + "updated": "2023-04-05T04:58:35.329-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 1, + "parent_slice": {} + }, + "sprint_id": 11 + }, + "cursor": { + "updated": "2023-04-05T04:58:53.600-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 17, + "parent_slice": {} + }, + "sprint_id": 2 + }, + "cursor": { + "updated": "2023-10-12T13:30:02.307-0700" + } + }, + { + "partition": { + "parent_slice": { + "board_id": 58, + "parent_slice": {} + }, + "sprint_id": 12 + }, + "cursor": { + "updated": "2023-05-08T03:04:45.139-0700" + } + } + ] + } + }, + "sourceStats": { + "recordCount": 1.0 } } -} +] diff --git a/airbyte-integrations/connectors/source-jira/metadata.yaml b/airbyte-integrations/connectors/source-jira/metadata.yaml index 8d7d382f83ae..36c23f5696e2 100644 --- a/airbyte-integrations/connectors/source-jira/metadata.yaml +++ b/airbyte-integrations/connectors/source-jira/metadata.yaml @@ -10,7 +10,7 @@ data: connectorSubtype: api connectorType: source definitionId: 68e63de2-bb83-4c7e-93fa-a8a9051e3993 - dockerImageTag: 1.2.2 + dockerImageTag: 2.0.0 dockerRepository: airbyte/source-jira documentationUrl: https://docs.airbyte.com/integrations/sources/jira githubIssueLabel: source-jira @@ -30,6 +30,12 @@ data: releaseStage: generally_available releases: breakingChanges: + 2.0.0: + message: "The source Jira connector is being migrated from the Python CDK to our declarative low-code CDK. Due to changes to the incremental stream state, this migration constitutes a breaking change. Additionally, an issue in the `ProjectAvatars` stream has been fixed. After updating, please reset your source before resuming syncs. For more information, see our migration documentation for source Jira." + upgradeDeadline: "2024-05-31" + scopedImpact: + - scopeType: stream + impactedScopes: ["board_issues", "issues", "sprint_issues"] 1.0.0: message: "Stream state will be saved for every board in stream `Boards Issues`. 
@@ -48,5 +54,5 @@ data: supportLevel: certified tags: - language:python - - cdk:python + - cdk:low-code metadataSpecVersion: "1.0" diff --git a/airbyte-integrations/connectors/source-jira/poetry.lock b/airbyte-integrations/connectors/source-jira/poetry.lock index d20b7f18673c..bc6fc2ead927 100644 --- a/airbyte-integrations/connectors/source-jira/poetry.lock +++ b/airbyte-integrations/connectors/source-jira/poetry.lock @@ -288,13 +288,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.0" +version = "1.2.1" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [package.extras] @@ -312,13 +312,13 @@ files = [ [[package]] name = "idna" -version = "3.6" +version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -511,28 +511,29 @@ pytzdata = ">=2020.1" [[package]] name = "platformdirs" -version = "4.2.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "4.2.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, - {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, + {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, + {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] [[package]] name = "pluggy" -version = "1.4.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] @@ -552,47 +553,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.14" +version = "1.10.15" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7f4fcec873f90537c382840f330b90f4715eebc2bc9925f04cb92de593eae054"}, - {file = "pydantic-1.10.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e3a76f571970fcd3c43ad982daf936ae39b3e90b8a2e96c04113a369869dc87"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d886bd3c3fbeaa963692ef6b643159ccb4b4cefaf7ff1617720cbead04fd1d"}, - {file = "pydantic-1.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:798a3d05ee3b71967844a1164fd5bdb8c22c6d674f26274e78b9f29d81770c4e"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:23d47a4b57a38e8652bcab15a658fdb13c785b9ce217cc3a729504ab4e1d6bc9"}, - {file = "pydantic-1.10.14-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9f674b5c3bebc2eba401de64f29948ae1e646ba2735f884d1594c5f675d6f2a"}, - {file = "pydantic-1.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:24a7679fab2e0eeedb5a8924fc4a694b3bcaac7d305aeeac72dd7d4e05ecbebf"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9d578ac4bf7fdf10ce14caba6f734c178379bd35c486c6deb6f49006e1ba78a7"}, - {file = "pydantic-1.10.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa7790e94c60f809c95602a26d906eba01a0abee9cc24150e4ce2189352deb1b"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad4e10efa5474ed1a611b6d7f0d130f4aafadceb73c11d9e72823e8f508e663"}, - {file = "pydantic-1.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1245f4f61f467cb3dfeced2b119afef3db386aec3d24a22a1de08c65038b255f"}, - {file = 
"pydantic-1.10.14-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:21efacc678a11114c765eb52ec0db62edffa89e9a562a94cbf8fa10b5db5c046"}, - {file = "pydantic-1.10.14-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:412ab4a3f6dbd2bf18aefa9f79c7cca23744846b31f1d6555c2ee2b05a2e14ca"}, - {file = "pydantic-1.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:e897c9f35281f7889873a3e6d6b69aa1447ceb024e8495a5f0d02ecd17742a7f"}, - {file = "pydantic-1.10.14-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d604be0f0b44d473e54fdcb12302495fe0467c56509a2f80483476f3ba92b33c"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a42c7d17706911199798d4c464b352e640cab4351efe69c2267823d619a937e5"}, - {file = "pydantic-1.10.14-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:596f12a1085e38dbda5cbb874d0973303e34227b400b6414782bf205cc14940c"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bfb113860e9288d0886e3b9e49d9cf4a9d48b441f52ded7d96db7819028514cc"}, - {file = "pydantic-1.10.14-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc3ed06ab13660b565eed80887fcfbc0070f0aa0691fbb351657041d3e874efe"}, - {file = "pydantic-1.10.14-cp37-cp37m-win_amd64.whl", hash = "sha256:ad8c2bc677ae5f6dbd3cf92f2c7dc613507eafe8f71719727cbc0a7dec9a8c01"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c37c28449752bb1f47975d22ef2882d70513c546f8f37201e0fec3a97b816eee"}, - {file = "pydantic-1.10.14-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49a46a0994dd551ec051986806122767cf144b9702e31d47f6d493c336462597"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53e3819bd20a42470d6dd0fe7fc1c121c92247bca104ce608e609b59bc7a77ee"}, - {file = "pydantic-1.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbb503bbbbab0c588ed3cd21975a1d0d4163b87e360fec17a792f7d8c4ff29f"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:336709883c15c050b9c55a63d6c7ff09be883dbc17805d2b063395dd9d9d0022"}, - {file = "pydantic-1.10.14-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ae57b4d8e3312d486e2498d42aed3ece7b51848336964e43abbf9671584e67f"}, - {file = "pydantic-1.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:dba49d52500c35cfec0b28aa8b3ea5c37c9df183ffc7210b10ff2a415c125c4a"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c66609e138c31cba607d8e2a7b6a5dc38979a06c900815495b2d90ce6ded35b4"}, - {file = "pydantic-1.10.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d986e115e0b39604b9eee3507987368ff8148222da213cd38c359f6f57b3b347"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:646b2b12df4295b4c3148850c85bff29ef6d0d9621a8d091e98094871a62e5c7"}, - {file = "pydantic-1.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282613a5969c47c83a8710cc8bfd1e70c9223feb76566f74683af889faadc0ea"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:466669501d08ad8eb3c4fecd991c5e793c4e0bbd62299d05111d4f827cded64f"}, - {file = "pydantic-1.10.14-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:13e86a19dca96373dcf3190fcb8797d40a6f12f154a244a8d1e8e03b8f280593"}, - {file = "pydantic-1.10.14-cp39-cp39-win_amd64.whl", hash = 
"sha256:08b6ec0917c30861e3fe71a93be1648a2aa4f62f866142ba21670b24444d7fd8"}, - {file = "pydantic-1.10.14-py3-none-any.whl", hash = "sha256:8ee853cd12ac2ddbf0ecbac1c289f95882b2d4482258048079d13be700aa114c"}, - {file = "pydantic-1.10.14.tar.gz", hash = "sha256:46f17b832fe27de7850896f3afee50ea682220dd218f7e9c88d436788419dca6"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:22ed12ee588b1df028a2aa5d66f07bf8f8b4c8579c2e96d5a9c1f96b77f3bb55"}, + {file = "pydantic-1.10.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75279d3cac98186b6ebc2597b06bcbc7244744f6b0b44a23e4ef01e5683cc0d2"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50f1666a9940d3d68683c9d96e39640f709d7a72ff8702987dab1761036206bb"}, + {file = "pydantic-1.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82790d4753ee5d00739d6cb5cf56bceb186d9d6ce134aca3ba7befb1eedbc2c8"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:d207d5b87f6cbefbdb1198154292faee8017d7495a54ae58db06762004500d00"}, + {file = "pydantic-1.10.15-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e49db944fad339b2ccb80128ffd3f8af076f9f287197a480bf1e4ca053a866f0"}, + {file = "pydantic-1.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:d3b5c4cbd0c9cb61bbbb19ce335e1f8ab87a811f6d589ed52b0254cf585d709c"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c3d5731a120752248844676bf92f25a12f6e45425e63ce22e0849297a093b5b0"}, + {file = "pydantic-1.10.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c365ad9c394f9eeffcb30a82f4246c0006417f03a7c0f8315d6211f25f7cb654"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3287e1614393119c67bd4404f46e33ae3be3ed4cd10360b48d0a4459f420c6a3"}, + {file = "pydantic-1.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be51dd2c8596b25fe43c0a4a59c2bee4f18d88efb8031188f9e7ddc6b469cf44"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6a51a1dd4aa7b3f1317f65493a182d3cff708385327c1c82c81e4a9d6d65b2e4"}, + {file = "pydantic-1.10.15-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4e316e54b5775d1eb59187f9290aeb38acf620e10f7fd2f776d97bb788199e53"}, + {file = "pydantic-1.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:0d142fa1b8f2f0ae11ddd5e3e317dcac060b951d605fda26ca9b234b92214986"}, + {file = "pydantic-1.10.15-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7ea210336b891f5ea334f8fc9f8f862b87acd5d4a0cbc9e3e208e7aa1775dabf"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3453685ccd7140715e05f2193d64030101eaad26076fad4e246c1cc97e1bb30d"}, + {file = "pydantic-1.10.15-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bea1f03b8d4e8e86702c918ccfd5d947ac268f0f0cc6ed71782e4b09353b26f"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:005655cabc29081de8243126e036f2065bd7ea5b9dff95fde6d2c642d39755de"}, + {file = "pydantic-1.10.15-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:af9850d98fc21e5bc24ea9e35dd80a29faf6462c608728a110c0a30b595e58b7"}, + {file = "pydantic-1.10.15-cp37-cp37m-win_amd64.whl", hash = "sha256:d31ee5b14a82c9afe2bd26aaa405293d4237d0591527d9129ce36e58f19f95c1"}, + {file = 
"pydantic-1.10.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5e09c19df304b8123938dc3c53d3d3be6ec74b9d7d0d80f4f4b5432ae16c2022"}, + {file = "pydantic-1.10.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7ac9237cd62947db00a0d16acf2f3e00d1ae9d3bd602b9c415f93e7a9fc10528"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:584f2d4c98ffec420e02305cf675857bae03c9d617fcfdc34946b1160213a948"}, + {file = "pydantic-1.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbc6989fad0c030bd70a0b6f626f98a862224bc2b1e36bfc531ea2facc0a340c"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d573082c6ef99336f2cb5b667b781d2f776d4af311574fb53d908517ba523c22"}, + {file = "pydantic-1.10.15-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6bd7030c9abc80134087d8b6e7aa957e43d35714daa116aced57269a445b8f7b"}, + {file = "pydantic-1.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:3350f527bb04138f8aff932dc828f154847fbdc7a1a44c240fbfff1b57f49a12"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51d405b42f1b86703555797270e4970a9f9bd7953f3990142e69d1037f9d9e51"}, + {file = "pydantic-1.10.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a980a77c52723b0dc56640ced396b73a024d4b74f02bcb2d21dbbac1debbe9d0"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67f1a1fb467d3f49e1708a3f632b11c69fccb4e748a325d5a491ddc7b5d22383"}, + {file = "pydantic-1.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:676ed48f2c5bbad835f1a8ed8a6d44c1cd5a21121116d2ac40bd1cd3619746ed"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92229f73400b80c13afcd050687f4d7e88de9234d74b27e6728aa689abcf58cc"}, + {file = "pydantic-1.10.15-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2746189100c646682eff0bce95efa7d2e203420d8e1c613dc0c6b4c1d9c1fde4"}, + {file = "pydantic-1.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:394f08750bd8eaad714718812e7fab615f873b3cdd0b9d84e76e51ef3b50b6b7"}, + {file = "pydantic-1.10.15-py3-none-any.whl", hash = "sha256:28e552a060ba2740d0d2aabe35162652c1459a0b9069fe0db7f4ee0e18e74d58"}, + {file = "pydantic-1.10.15.tar.gz", hash = "sha256:ca832e124eda231a60a041da4f013e3ff24949d94a01154b137fc2f2a43c3ffb"}, ] [package.dependencies] @@ -684,17 +685,17 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-mock" -version = "3.12.0" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, - {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, ] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] dev = ["pre-commit", "pytest-asyncio", "tox"] @@ -837,22 +838,20 @@ yaml = ["pyyaml (>=6.0.1)"] [[package]] name = "requests-mock" -version = "1.11.0" +version = "1.12.1" description = "Mock out responses 
from the requests package" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"}, - {file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"}, + {file = "requests-mock-1.12.1.tar.gz", hash = "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401"}, + {file = "requests_mock-1.12.1-py2.py3-none-any.whl", hash = "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563"}, ] [package.dependencies] -requests = ">=2.3,<3" -six = "*" +requests = ">=2.22,<3" [package.extras] fixture = ["fixtures"] -test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"] [[package]] name = "responses" @@ -876,18 +875,18 @@ tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asy [[package]] name = "setuptools" -version = "69.2.0" +version = "69.5.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, - {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] 
[[package]]

@@ -925,13 +924,13 @@ files = [

[[package]]
name = "typing-extensions"
-version = "4.10.0"
+version = "4.11.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
-    {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
-    {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
+    {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
+    {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
]

[[package]]
diff --git a/airbyte-integrations/connectors/source-jira/pyproject.toml b/airbyte-integrations/connectors/source-jira/pyproject.toml
index 68e95e2706e4..60a1bd041712 100644
--- a/airbyte-integrations/connectors/source-jira/pyproject.toml
+++ b/airbyte-integrations/connectors/source-jira/pyproject.toml
@@ -3,7 +3,7 @@ requires = [ "poetry-core>=1.0.0",] build-backend = "poetry.core.masonry.api"

 [tool.poetry]
-version = "1.2.2"
+version = "2.0.0"
 name = "source-jira"
 description = "Source implementation for Jira."
 authors = [ "Airbyte ",]
diff --git a/airbyte-integrations/connectors/source-jira/source_jira/components/__init__.py b/airbyte-integrations/connectors/source-jira/source_jira/components/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/airbyte-integrations/connectors/source-jira/source_jira/components/extractors.py b/airbyte-integrations/connectors/source-jira/source_jira/components/extractors.py
new file mode 100644
index 000000000000..2a30cc666541
--- /dev/null
+++ b/airbyte-integrations/connectors/source-jira/source_jira/components/extractors.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2024 Airbyte, Inc., all rights reserved.
+
+from dataclasses import dataclass
+from typing import Any, List, Mapping
+
+from airbyte_cdk.sources.declarative.extractors import DpathExtractor
+from requests_cache import Response
+
+
+@dataclass
+class LabelsRecordExtractor(DpathExtractor):
+    """
+    A custom record extractor is needed to handle cases when records are represented as a list of strings instead of dictionaries.
+    Example:
+        -> ["label 1", "label 2", ..., "label n"]
+        <- [{"label": "label 1"}, {"label": "label 2"}, ..., {"label": "label n"}]
+    """
+
+    def extract_records(self, response: Response) -> List[Mapping[str, Any]]:
+        records = super().extract_records(response)
+        return [{"label": record} for record in records]
diff --git a/airbyte-integrations/connectors/source-jira/source_jira/components/partition_routers.py b/airbyte-integrations/connectors/source-jira/source_jira/components/partition_routers.py
new file mode 100644
index 000000000000..d5f27802ff3c
--- /dev/null
+++ b/airbyte-integrations/connectors/source-jira/source_jira/components/partition_routers.py
@@ -0,0 +1,104 @@
+#
+# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
+#
+
+from dataclasses import dataclass
+from typing import Any, Iterable, Mapping
+
+import dpath.util
+from airbyte_cdk.models import AirbyteMessage, SyncMode, Type
+from airbyte_cdk.sources.declarative.partition_routers.substream_partition_router import SubstreamPartitionRouter
+from airbyte_cdk.sources.declarative.types import Record, StreamSlice
+
+
+@dataclass
+class SubstreamPartitionRouterWithContext(SubstreamPartitionRouter):
+    """
+    The base SubstreamPartitionRouter cannot pass additional data from the parent record to the stream slices it produces.
+    This customization works around that by setting the parent record data as a stream_slice.parent_record attribute.
+    """
+
+    def stream_slices(self) -> Iterable[StreamSlice]:
+        if not self.parent_stream_configs:
+            yield from []
+        else:
+            for parent_stream_config in self.parent_stream_configs:
+                parent_stream = parent_stream_config.stream
+                parent_field = parent_stream_config.parent_key.eval(self.config)  # type: ignore # parent_key is always cast to an interpolated string
+                partition_field = parent_stream_config.partition_field.eval(self.config)  # type: ignore # partition_field is always cast to an interpolated string
+                for parent_stream_slice in parent_stream.stream_slices(
+                    sync_mode=SyncMode.full_refresh, cursor_field=None, stream_state=None
+                ):
+                    empty_parent_slice = True
+                    parent_partition = parent_stream_slice.partition if parent_stream_slice else {}
+
+                    for parent_record in parent_stream.read_records(
+                        sync_mode=SyncMode.full_refresh, cursor_field=None, stream_slice=parent_stream_slice, stream_state=None
+                    ):
+                        # Skip non-records (e.g. AirbyteLogMessage)
+                        if isinstance(parent_record, AirbyteMessage):
+                            if parent_record.type == Type.RECORD:
+                                parent_record = parent_record.record.data
+                            else:
+                                continue
+                        elif isinstance(parent_record, Record):
+                            parent_record = parent_record.data
+                        try:
+                            partition_value = dpath.util.get(parent_record, parent_field)
+                        except KeyError:
+                            pass
+                        else:
+                            empty_parent_slice = False
+                            stream_slice = StreamSlice(
+                                partition={partition_field: partition_value, "parent_slice": parent_partition}, cursor_slice={}
+                            )
+                            setattr(stream_slice, "parent_record", parent_record)
+                            yield stream_slice
+                    # If the parent slice contains no records, yield nothing
+                    if empty_parent_slice:
+                        yield from []
+
+
+class SprintIssuesSubstreamPartitionRouter(SubstreamPartitionRouter):
+    """
+    We often require certain data to be fully retrieved from the parent stream before we begin requesting data from the child stream.
+    In this custom component, we execute stream slices twice: first, we retrieve all the parent_stream_fields,
+    and then we call stream slices again, this time with the previously fetched fields.
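+
+    For example (the custom field IDs below are hypothetical): the first pass over the fields parent stream
+    might yield ["customfield_10011", "customfield_10014"]; the defaults are appended to produce
+    ["customfield_10011", "customfield_10014", "key", "status", "created", "updated"], and each slice
+    from the second pass is then emitted with that list attached as `parent_stream_fields`.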
+ """ + + def __post_init__(self, parameters: Mapping[str, Any]) -> None: + super().__post_init__(parameters) + fields_parent_stream_config, *parent_stream_configs = self.parent_stream_configs + self.fields_parent_stream_config = fields_parent_stream_config + self.parent_stream_configs = parent_stream_configs + + def stream_slices(self) -> Iterable[StreamSlice]: + self.parent_stream_configs, parent_stream_configs = [self.fields_parent_stream_config], self.parent_stream_configs + fields = [s.partition[self.fields_parent_stream_config.partition_field.eval(self.config)] for s in super().stream_slices()] + fields += ["key", "status", "created", "updated"] + self.parent_stream_configs = parent_stream_configs + for stream_slice in super().stream_slices(): + setattr(stream_slice, "parent_stream_fields", fields) + yield stream_slice + + +class SubstreamOrSinglePartitionRouter(SubstreamPartitionRouter): + """ + Depending on the configuration option, we may or may not need to iterate over the parent stream. + By default, if no projects are set, the child stream should produce records as a normal stream without the parent stream. + + If we do not specify a project in a child stream, it means we are requesting information for all of them, + so there is no need to slice by all the projects and request data as many times as we have projects one by one. + That's why an empty slice is returned. + + If projects are defined in the configuration, + we need to iterate over the given projects and provide a child stream with a slice per project so that it can make a query per project. + + Therefore, if the option is not set, it does not necessarily mean there is no data. + """ + + def stream_slices(self) -> Iterable[StreamSlice]: + if self.config.get("projects"): + yield from super().stream_slices() + else: + yield from [StreamSlice(partition={}, cursor_slice={})] diff --git a/airbyte-integrations/connectors/source-jira/source_jira/components/transformations.py b/airbyte-integrations/connectors/source-jira/source_jira/components/transformations.py new file mode 100644 index 000000000000..1b2be1f7b6e5 --- /dev/null +++ b/airbyte-integrations/connectors/source-jira/source_jira/components/transformations.py @@ -0,0 +1,25 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. + +from dataclasses import InitVar, dataclass +from typing import Any, Mapping, Optional + +from airbyte_cdk.sources.declarative.transformations import RecordTransformation +from airbyte_cdk.sources.declarative.types import Config, FieldPointer, StreamSlice, StreamState + + +@dataclass +class RemoveEmptyFields(RecordTransformation): + field_pointers: FieldPointer + parameters: InitVar[Mapping[str, Any]] + + def transform( + self, + record: Mapping[str, Any], + config: Optional[Config] = None, + stream_state: Optional[StreamState] = None, + stream_slice: Optional[StreamSlice] = None, + ) -> Mapping[str, Any]: + for pointer in self.field_pointers: + if pointer in record: + record[pointer] = {k: v for k, v in record[pointer].items() if v is not None} + return record diff --git a/airbyte-integrations/connectors/source-jira/source_jira/config_migrations.py b/airbyte-integrations/connectors/source-jira/source_jira/config_migrations.py deleted file mode 100644 index 5667cdea454f..000000000000 --- a/airbyte-integrations/connectors/source-jira/source_jira/config_migrations.py +++ /dev/null @@ -1,102 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# - - -import logging -from typing import Any, List, Mapping - -from airbyte_cdk.config_observation import create_connector_config_control_message -from airbyte_cdk.entrypoint import AirbyteEntrypoint -from airbyte_cdk.sources import Source -from airbyte_cdk.sources.message import InMemoryMessageRepository, MessageRepository - -logger = logging.getLogger("airbyte") - - -class MigrateIssueExpandProperties: - """ - This class stands for migrating the config at runtime, - while providing the backward compatibility when falling back to the previous source version. - - Specifically, starting from `0.6.1`, the `issues_stream_expand` property should be like : - > List("renderedFields", "transitions", "changelog" ...) - instead of, in `0.6.0`: - > expand_issue_changelog: bool: True - > render_fields: bool: True - > expand_issue_transition: bool: True - """ - - message_repository: MessageRepository = InMemoryMessageRepository() - migrate_from_keys_map: dict = { - "expand_issue_changelog": "changelog", - "render_fields": "renderedFields", - "expand_issue_transition": "transitions", - } - migrate_to_key: str = "issues_stream_expand_with" - - @classmethod - def should_migrate(cls, config: Mapping[str, Any]) -> bool: - """ - This method determines whether the config should be migrated to have the new structure for the `issues_stream_expand_with`, - based on the source spec. - Returns: - > True, if the transformation is necessary - > False, otherwise. - > Raises the Exception if the structure could not be migrated. - """ - # If the config was already migrated, there is no need to do this again. - # but if the customer has already switched to the new version, - # corrected the old config and switches back to the new version, - # we should try to migrate the modified old issue expand properties. - if cls.migrate_to_key in config: - return not len(config[cls.migrate_to_key]) > 0 - - if any(config.get(key) for key in cls.migrate_from_keys_map): - return True - return False - - @classmethod - def transform_to_array(cls, config: Mapping[str, Any]) -> Mapping[str, Any]: - # assign old values to new property that will be used within the new version - config[cls.migrate_to_key] = [] - for k, v in cls.migrate_from_keys_map.items(): - if config.get(k): - config[cls.migrate_to_key].append(v) - # transform boolean flags to `list` of objects - return config - - @classmethod - def modify_and_save(cls, config_path: str, source: Source, config: Mapping[str, Any]) -> Mapping[str, Any]: - # modify the config - migrated_config = cls.transform_to_array(config) - # save the config - source.write_config(migrated_config, config_path) - # return modified config - return migrated_config - - @classmethod - def emit_control_message(cls, migrated_config: Mapping[str, Any]) -> None: - # add the Airbyte Control Message to message repo - cls.message_repository.emit_message(create_connector_config_control_message(migrated_config)) - # emit the Airbyte Control Message from message queue to stdout - for message in cls.message_repository._message_queue: - print(message.json(exclude_unset=True)) - - @classmethod - def migrate(cls, args: List[str], source: Source) -> None: - """ - This method checks the input args, should the config be migrated, - transform if necessary and emit the CONTROL message. 
- """ - # get config path - config_path = AirbyteEntrypoint(source).extract_config(args) - # proceed only if `--config` arg is provided - if config_path: - # read the existing config - config = source.read_config(config_path) - # migration check - if cls.should_migrate(config): - cls.emit_control_message( - cls.modify_and_save(config_path, source, config), - ) diff --git a/airbyte-integrations/connectors/source-jira/source_jira/manifest.yaml b/airbyte-integrations/connectors/source-jira/source_jira/manifest.yaml new file mode 100644 index 000000000000..d76c7a141466 --- /dev/null +++ b/airbyte-integrations/connectors/source-jira/source_jira/manifest.yaml @@ -0,0 +1,1333 @@ +version: 0.72.1 +type: DeclarativeSource + +definitions: + # Authenticators + + authenticator: + type: BasicHttpAuthenticator + username: "{{ config['email'] }}" + password: "{{ config['api_token'] }}" + + # Error handler + error_handler: + type: DefaultErrorHandler + max_retries: 10 + response_filters: + - http_codes: [400] + action: IGNORE + + # Requesters + + requester: + type: HttpRequester + http_method: GET + url_base: "https://{{ config['domain'] }}/rest/api/3/" + authenticator: "#/definitions/authenticator" + error_handler: "#/definitions/error_handler" + + requester_v1: + $ref: "#/definitions/requester" + url_base: "https://{{ config['domain'] }}/rest/agile/1.0/" + + # Selector + + selector: + type: RecordSelector + extractor: + type: DpathExtractor + field_path: ["{{ parameters.extract_field }}"] + + # Paginator + + no_pagination_paginator: + type: NoPagination + + paginator: + type: "DefaultPaginator" + page_size_option: + type: RequestOption + inject_into: request_parameter + field_name: maxResults + page_token_option: + type: RequestOption + inject_into: "request_parameter" + field_name: "startAt" + # Why not OffsetPagination: + # Due to the record_filter, we may end up with a reduced number of records. + # For instance, a response has 50 records and the page_size option is also set to 50, + # but the record_filter results in filtering out 40 records, leaving us with only 10. + # During the calculation of the next_page_token, there is a check to ensure that + # the number of records is not less than the given page_size. + # If this condition is not met, None is returned, resulting in end of pagination. 
+ pagination_strategy: + type: CursorPagination + cursor_value: "{{ response.get('startAt') + response['maxResults'] }}" + stop_condition: "{{ response.get('isLast') or response.get('startAt') + response.get('maxResults') >= response.get('total') }}" + page_size: 50 + + # Retriever + + retriever: + record_selector: "#/definitions/selector" + requester: "#/definitions/requester" + paginator: "#/definitions/paginator" + + retriever_no_pagination: + $ref: "#/definitions/retriever" + paginator: "#/definitions/no_pagination_paginator" + + retriever_use_cache: + $ref: "#/definitions/retriever" + requester: + $ref: "#/definitions/retriever/requester" + use_cache: true + + retriever_no_pagination_use_cache: + $ref: "#/definitions/retriever_use_cache" + paginator: "#/definitions/no_pagination_paginator" + + retriever_v1: + $ref: "#/definitions/retriever" + requester: "#/definitions/requester_v1" + + retriever_v1_use_cache: + $ref: "#/definitions/retriever_v1" + requester: + $ref: "#/definitions/retriever_v1/requester" + use_cache: true + + # Service Streams + + full_refresh_stream: + type: DeclarativeStream + retriever: "#/definitions/retriever" + + # Full Refresh Streams + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-application-roles/#api-rest-api-3-applicationrole-get + application_roles_stream: + $ref: "#/definitions/full_refresh_stream" + name: application_roles + primary_key: "key" + retriever: "#/definitions/retriever_no_pagination" + $parameters: + path: "applicationrole" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-avatars/#api-rest-api-3-avatar-type-system-get + avatars_stream: + $ref: "#/definitions/full_refresh_stream" + name: avatars + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: ListPartitionRouter + cursor_field: "slice" + values: + - issuetype + - project + - user + $parameters: + path: "avatar/{{ stream_slice.slice }}/system" + extract_field: "system" + + # https://developer.atlassian.com/cloud/jira/software/rest/api-group-other-operations/#api-agile-1-0-board-get + boards_stream: + $ref: "#/definitions/full_refresh_stream" + name: boards + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_v1_use_cache" + record_selector: + $ref: "#/definitions/selector" + record_filter: + condition: "{{ not config.get('projects') or record.get('location', {}).get('projectKey') in config['projects'] }}" + transformations: + - type: AddFields + fields: + - path: ["projectId"] + value_type: string + value: "{{ record.get('location', {}).get('projectId', '') }}" + - path: ["projectKey"] + value_type: string + value: "{{ record.get('location', {}).get('projectKey', '') }}" + $parameters: + path: "board" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-dashboards/#api-rest-api-3-dashboard-get + dashboards_stream: + $ref: "#/definitions/full_refresh_stream" + name: dashboards + primary_key: "id" + $parameters: + path: "dashboard" + extract_field: "dashboards" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-filters/#api-rest-api-3-filter-search-get + filters_stream: + $ref: "#/definitions/full_refresh_stream" + name: filters + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_use_cache" + requester: + $ref: "#/definitions/retriever_use_cache/requester" + request_parameters: + expand: 
"description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,isWritable,subscriptions" + $parameters: + path: "filter/search" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-groups/#api-rest-api-3-group-bulk-get + groups_stream: + $ref: "#/definitions/full_refresh_stream" + name: groups + primary_key: "groupId" + $parameters: + path: "group/bulk" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-fields/#api-rest-api-3-field-get + issue_fields_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_fields + primary_key: "id" + retriever: "#/definitions/retriever_no_pagination_use_cache" + $parameters: + path: "field" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-field-configurations/#api-rest-api-3-fieldconfiguration-get + issue_field_configurations_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_field_configurations + primary_key: "id" + $parameters: + path: "fieldconfiguration" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-link-types/#api-rest-api-3-issuelinktype-get + issue_link_types_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: issue_link_types + primary_key: "id" + $parameters: + path: "issueLinkType" + extract_field: "issueLinkTypes" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-navigator-settings/#api-rest-api-3-settings-columns-get + issue_navigator_settings_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: issue_navigator_settings + $parameters: + path: "settings/columns" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-notification-schemes/#api-rest-api-3-notificationscheme-get + issue_notification_schemes_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_notification_schemes + primary_key: "id" + $parameters: + path: "notificationscheme" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-priorities/#api-rest-api-3-priority-get + issue_priorities_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_priorities + primary_key: "id" + $parameters: + path: "priority/search" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-resolutions/#api-rest-api-3-resolution-search-get + issue_resolutions_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_resolutions + primary_key: "id" + $parameters: + path: "resolution/search" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-security-schemes/#api-rest-api-3-issuesecurityschemes-get + issue_security_schemes_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: issue_security_schemes + primary_key: "id" + $parameters: + path: "issuesecurityschemes" + extract_field: "issueSecuritySchemes" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-types/#api-group-issue-types + issue_types_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: issue_types + primary_key: "id" + $parameters: + path: 
"issuetype" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-type-schemes/#api-rest-api-3-issuetypescheme-get + issue_type_schemes_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_type_schemes + primary_key: "id" + $parameters: + path: "issuetypescheme" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-type-screen-schemes/#api-rest-api-3-issuetypescreenscheme-get + issue_type_screen_schemes_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_type_screen_schemes + primary_key: "id" + $parameters: + path: "issuetypescreenscheme" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-jira-settings/#api-rest-api-3-application-properties-get + jira_settings_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: jira_settings + primary_key: "id" + $parameters: + path: "application-properties" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-labels/#api-rest-api-3-label-get + labels_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: + $ref: "#/definitions/retriever" + record_selector: + $ref: "#/definitions/retriever/record_selector" + extractor: + type: CustomRecordExtractor + class_name: "source_jira.components.extractors.LabelsRecordExtractor" + field_path: ["values"] + name: labels + primary_key: "label" + $parameters: + path: "label" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-permissions/#api-rest-api-3-permissions-get + permissions_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: + $ref: "#/definitions/retriever_no_pagination" + record_selector: + $ref: "#/definitions/selector" + extractor: + $ref: "#/definitions/selector/extractor" + field_path: + - "permissions" + - "*" + name: permissions + primary_key: "key" + $parameters: + path: "permissions" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-permission-schemes/#api-rest-api-3-permissionscheme-get + permission_schemes_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: permission_schemes + primary_key: "id" + $parameters: + path: "permissionscheme" + extract_field: "permissionSchemes" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-projects/#api-rest-api-3-project-search-get + projects_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: + $ref: "#/definitions/retriever_use_cache" + requester: + $ref: "#/definitions/requester" + request_parameters: + expand: "description,lead" + status: "['live', 'archived', 'deleted']" + record_selector: + $ref: "#/definitions/selector" + record_filter: + condition: "{{ not config.get('projects') or record.get('key') in config['projects'] }}" + name: projects + primary_key: "id" + $parameters: + path: "project/search" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-categories/#api-rest-api-3-projectcategory-get + project_categories_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: + $ref: "#/definitions/retriever_no_pagination" + requester: + $ref: "#/definitions/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400, 403] + action: IGNORE + name: 
project_categories + primary_key: "id" + $parameters: + path: "projectCategory" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-roles#api-rest-api-3-role-get + project_roles_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: project_roles + primary_key: "id" + $parameters: + path: "role" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-types/#api-rest-api-3-project-type-get + project_types_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: project_types + $parameters: + path: "project/type" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-screens/#api-rest-api-3-screens-get + screens_stream: + $ref: "#/definitions/full_refresh_stream" + name: screens + primary_key: "id" + retriever: "#/definitions/retriever_use_cache" + $parameters: + path: "screens" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-screen-schemes/#api-rest-api-3-screenscheme-get + screen_schemes_stream: + $ref: "#/definitions/full_refresh_stream" + name: screen_schemes + primary_key: "id" + $parameters: + path: "screenscheme" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-time-tracking/#api-rest-api-3-configuration-timetracking-list-get + time_tracking_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: time_tracking + primary_key: "key" + $parameters: + path: "configuration/timetracking/list" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-users/#api-rest-api-3-users-search-get + users_stream: + $ref: "#/definitions/full_refresh_stream" + name: users + primary_key: "accountId" + retriever: + $ref: "#/definitions/retriever_no_pagination_use_cache" + paginator: + $ref: "#/definitions/paginator" + pagination_strategy: + type: OffsetIncrement + page_size: 50 + $parameters: + path: "users/search" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-workflows/#api-rest-api-3-workflow-search-get + workflows_stream: + $ref: "#/definitions/full_refresh_stream" + name: workflows + primary_key: "id" + $parameters: + path: "workflow/search" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-workflow-schemes/#api-rest-api-3-workflowscheme-get + workflow_schemes_stream: + $ref: "#/definitions/full_refresh_stream" + name: workflow_schemes + primary_key: "id" + $parameters: + path: "workflowscheme" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-workflow-statuses/#api-rest-api-3-status-get + workflow_statuses_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: workflow_statuses + primary_key: "id" + $parameters: + path: "status" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-workflow-status-categories/#api-rest-api-3-statuscategory-get + workflow_status_categories_stream: + $ref: "#/definitions/full_refresh_stream" + retriever: "#/definitions/retriever_no_pagination" + name: workflow_status_categories + primary_key: "id" + $parameters: + path: "statuscategory" + extract_field: "*" + + # 
Substream Based Full Refresh Streams + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-filter-sharing/#api-rest-api-3-filter-id-permission-get + filter_sharing_stream: + $ref: "#/definitions/full_refresh_stream" + name: filter_sharing + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/filters_stream" + parent_key: "id" + partition_field: "filter_id" + transformations: + - type: AddFields + fields: + - path: ["filterId"] + value_type: string + value: "{{ stream_slice.filter_id }}" + $parameters: + path: "filter/{{ stream_slice.filter_id }}/permission" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-fields/#api-rest-api-3-field-get + __custom_issue_fields_substream: + $ref: "#/definitions/issue_fields_stream" + retriever: + $ref: "#/definitions/retriever_no_pagination_use_cache" + record_selector: + $ref: "#/definitions/selector" + record_filter: + condition: "{{ record.get('custom', False) }}" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-custom-field-contexts/#api-rest-api-3-field-fieldid-context-get + issue_custom_field_contexts_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_custom_field_contexts + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_use_cache" + partition_router: + type: CustomPartitionRouter + class_name: "source_jira.components.partition_routers.SubstreamPartitionRouterWithContext" + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/__custom_issue_fields_substream" + parent_key: "id" + partition_field: "field_id" + requester: + $ref: "#/definitions/retriever_use_cache/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400, 403, 404] + action: IGNORE + transformations: + - type: AddFields + fields: + - path: ["fieldId"] + value_type: string + value: "{{ stream_slice.field_id }}" + - path: ["fieldType"] + value_type: string + value: "{{ stream_slice.parent_record.schema.type }}" + $parameters: + path: "field/{{ stream_slice.field_id }}/context" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-custom-field-contexts/#api-rest-api-3-field-fieldid-context-get + __issue_custom_field_contexts_substream: + $ref: "#/definitions/issue_custom_field_contexts_stream" + retriever: + $ref: "#/definitions/issue_custom_field_contexts_stream/retriever" + record_selector: + $ref: "#/definitions/selector" + record_filter: + condition: "{{ stream_slice.parent_record.schema.type == 'option' }}" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-custom-field-options/#api-rest-api-3-field-fieldid-context-contextid-option-get + issue_custom_field_options_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_custom_field_options + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_use_cache" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/__issue_custom_field_contexts_substream" + parent_key: "id" + partition_field: "context_id" + requester: + $ref: "#/definitions/retriever_use_cache/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400, 403, 404] + action: IGNORE +
transformations: + - type: AddFields + fields: + - path: ["fieldId"] + value_type: string + value: "{{ stream_slice.parent_slice.field_id }}" + - path: ["contextId"] + value_type: string + value: "{{ stream_slice.context_id }}" + $parameters: + path: "field/{{ stream_slice.parent_slice.field_id }}/context/{{ stream_slice.context_id }}/option" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-properties/#api-rest-api-3-issue-issueidorkey-properties-get + __issue_property_keys_substream: + $ref: "#/definitions/full_refresh_stream" + name: issue_property_keys + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_no_pagination_use_cache" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/issues_stream" + parent_key: "key" + partition_field: "issue_property_key" + requester: + $ref: "#/definitions/retriever_use_cache/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400, 404] + action: IGNORE + $parameters: + path: "issue/{{ stream_slice.issue_property_key }}/properties" + extract_field: "keys" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-properties/#api-rest-api-3-issue-issueidorkey-properties-propertykey-get + issue_properties_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_properties + primary_key: "key" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/__issue_property_keys_substream" + parent_key: "key" + partition_field: "property_key" + record_selector: + $ref: "#/definitions/selector" + extractor: + type: DpathExtractor + field_path: [] + transformations: + - type: AddFields + fields: + - path: ["issueId"] + value_type: string + value: "{{ stream_slice.parent_slice.issue_property_key }}" + $parameters: + path: "issue/{{ stream_slice.parent_slice.issue_property_key }}/properties/{{ stream_slice.property_key }}" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-remote-links/#api-rest-api-3-issue-issueidorkey-remotelink-get + issue_remote_links_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_remote_links + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/issues_stream" + parent_key: "key" + partition_field: "issue_property_key" + transformations: + - type: AddFields + fields: + - path: ["issueId"] + value_type: string + value: "{{ stream_slice.issue_property_key }}" + $parameters: + path: "issue/{{ stream_slice.issue_property_key }}/remotelink" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issues/#api-rest-api-3-issue-issueidorkey-transitions-get + issue_transitions_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_transitions + primary_key: + - "issueId" + - "id" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/issues_stream" + parent_key: "key" + partition_field: "issue_property_key" + transformations: + - type: AddFields + fields: + - path: ["issueId"] + 
value_type: string + value: "{{ stream_slice.issue_property_key }}" + $parameters: + path: "issue/{{ stream_slice.issue_property_key }}/transitions" + extract_field: "transitions" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-votes/#api-rest-api-3-issue-issueidorkey-votes-get + issue_votes_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_votes + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/issues_stream" + parent_key: "key" + partition_field: "issue_property_key" + record_selector: + $ref: "#/definitions/selector" + extractor: + type: DpathExtractor + field_path: [] + transformations: + - type: AddFields + fields: + - path: ["issueId"] + value_type: string + value: "{{ stream_slice.issue_property_key }}" + $parameters: + path: "issue/{{ stream_slice.issue_property_key }}/votes" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-watchers/#api-rest-api-3-issue-issueidorkey-watchers-get + issue_watchers_stream: + $ref: "#/definitions/full_refresh_stream" + name: issue_watchers + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/issues_stream" + parent_key: "key" + partition_field: "issue_property_key" + record_selector: + $ref: "#/definitions/selector" + extractor: + type: DpathExtractor + field_path: [] + requester: + $ref: "#/definitions/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400, 404] + action: IGNORE + transformations: + - type: AddFields + fields: + - path: ["issueId"] + value_type: string + value: "{{ stream_slice.issue_property_key }}" + $parameters: + path: "issue/{{ stream_slice.issue_property_key }}/watchers" + extract_field: "*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-avatars/#api-rest-api-3-project-projectidorkey-avatars-get + project_avatars_stream: + $ref: "#/definitions/full_refresh_stream" + name: project_avatars + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/projects_stream" + parent_key: "id" + partition_field: "project_id" + record_selector: + $ref: "#/definitions/selector" + extractor: + type: DpathExtractor + field_path: ["*", "*"] + requester: + $ref: "#/definitions/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400, 404] + action: IGNORE + transformations: + - type: AddFields + fields: + - path: ["projectId"] + value_type: string + value: "{{ stream_slice.project_id }}" + $parameters: + path: "project/{{ stream_slice.project_id }}/avatars" + extract_field: "*/*" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-components/#api-rest-api-3-project-projectidorkey-component-get + project_components_stream: + $ref: "#/definitions/full_refresh_stream" + name: project_components + primary_key: "id" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/projects_stream" + parent_key: "key" + partition_field: 
"project_key" + $parameters: + path: "project/{{ stream_slice.project_key }}/component" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-email/#api-rest-api-3-project-projectid-email-get + project_email_stream: + $ref: "#/definitions/full_refresh_stream" + name: project_email + primary_key: "projectId" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/projects_stream" + parent_key: "id" + partition_field: "project_id" + record_selector: + $ref: "#/definitions/selector" + extractor: + type: DpathExtractor + field_path: [] + requester: + $ref: "#/definitions/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400, 403] + action: IGNORE + transformations: + - type: AddFields + fields: + - path: ["projectId"] + value_type: string + value: "{{ stream_slice.project_id }}" + $parameters: + path: "project/{{ stream_slice.project_id }}/email" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-permission-schemes/#api-rest-api-3-project-projectkeyorid-securitylevel-get + project_permission_schemes_stream: + $ref: "#/definitions/full_refresh_stream" + name: project_permission_schemes + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/projects_stream" + parent_key: "key" + partition_field: "project_key" + transformations: + - type: AddFields + fields: + - path: ["projectId"] + value_type: string + value: "{{ stream_slice.project_key }}" + $parameters: + path: "project/{{ stream_slice.project_key }}/securitylevel" + extract_field: "levels" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-versions/#api-rest-api-3-project-projectidorkey-version-get + project_versions_stream: + $ref: "#/definitions/full_refresh_stream" + name: project_versions + primary_key: "id" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/projects_stream" + parent_key: "key" + partition_field: "project_key" + $parameters: + path: "project/{{ stream_slice.project_key }}/version" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-screen-tabs/#api-rest-api-3-screens-screenid-tabs-get + screen_tabs_stream: + $ref: "#/definitions/full_refresh_stream" + name: screen_tabs + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_no_pagination_use_cache" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/screens_stream" + parent_key: "id" + partition_field: "screen_id" + requester: + $ref: "#/definitions/retriever_use_cache/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400] + # This is not displayed for some reason + error_message: "{{ ','.join(response.get('errorMessages', [''])) }}" + action: IGNORE + transformations: + - type: AddFields + fields: + - path: ["screenId"] + value_type: integer + value: "{{ stream_slice.screen_id }}" + $parameters: + path: "screens/{{ stream_slice.screen_id }}/tabs" + extract_field: "*" 
+ + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-screen-tab-fields/#api-rest-api-3-screens-screenid-tabs-tabid-fields-get + screen_tab_fields_stream: + $ref: "#/definitions/full_refresh_stream" + name: screen_tab_fields + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/screen_tabs_stream" + parent_key: "id" + partition_field: "tab_id" + transformations: + - type: AddFields + fields: + - path: ["tabId"] + value_type: integer + value: "{{ stream_slice.tab_id }}" + - path: ["screenId"] + value_type: integer + value: "{{ stream_slice.parent_slice.screen_id }}" + $parameters: + path: "screens/{{ stream_slice.parent_slice.screen_id }}/tabs/{{ stream_slice.tab_id }}/fields" + extract_field: "*" + + __boards_substream: + $ref: "#/definitions/boards_stream" + retriever: + $ref: "#/definitions/boards_stream/retriever" + record_selector: + $ref: "#/definitions/selector" + record_filter: + condition: "{{ record.type in ['scrum', 'simple'] and (not config.get('projects') or record.get('location', {}).get('projectKey') in config['projects']) }}" + + # https://developer.atlassian.com/cloud/jira/software/rest/api-group-board/#api-rest-agile-1-0-board-boardid-sprint-get + sprints_stream: + $ref: "#/definitions/full_refresh_stream" + name: sprints + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_v1_use_cache" + requester: + $ref: "#/definitions/retriever_v1_use_cache/requester" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - type: HttpResponseFilter + http_codes: [400] + action: IGNORE + # Even though the error_message is defined, it does not work; None is returned + error_message: > + The board does not support sprints. If it's a team-managed board, check that sprints are enabled under project settings. + If it's a company-managed board, check that it has at least one Scrum board associated with it.
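+              # Illustrative note (assumed CDK semantics, not stated in this manifest): with
+              # action IGNORE, a 400 from a board that has no sprint support is swallowed, e.g.
+              #   GET board/42/sprint -> 400  =>  the partition yields zero records
+              # and the sync continues instead of failing the whole sprints stream.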
+ partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/__boards_substream" + parent_key: "id" + partition_field: "board_id" + transformations: + - type: AddFields + fields: + - path: ["boardId"] + value_type: integer + value: "{{ stream_slice.board_id }}" + $parameters: + path: "board/{{ stream_slice.board_id }}/sprint" + extract_field: "values" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-users/#api-rest-api-3-user-get + users_groups_detailed_stream: + $ref: "#/definitions/full_refresh_stream" + name: users_groups_detailed + primary_key: "accountId" + retriever: + $ref: "#/definitions/retriever_no_pagination" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/users_stream" + parent_key: "accountId" + partition_field: "user_account_id" + requester: + $ref: "#/definitions/requester" + request_parameters: + accountId: "{{ stream_slice.user_account_id }}" + expand: "groups,applicationRoles" + record_selector: + $ref: "#/definitions/selector" + extractor: + type: DpathExtractor + field_path: [] + $parameters: + path: "user" + extract_field: "*" + + # Incremental Streams + + semi_incremental_retriever: + $ref: "#/definitions/retriever" + record_selector: + $ref: "#/definitions/selector" + record_filter: + condition: "{{ record['updated'] >= stream_slice['start_time'] }}" + + incremental_sync: + type: DatetimeBasedCursor + cursor_field: "updated" + start_datetime: "{{ config.get('start_date', '1970-01-01T00:00:00Z') }}" + datetime_format: "%Y-%m-%dT%H:%M:%SZ" + cursor_datetime_formats: + - "%Y-%m-%dT%H:%M:%S.%f%z" + lookback_window: "PT{{ config.get('lookback_window_minutes', '0') }}M" + + incremental_stream: + type: DeclarativeStream + retriever: "#/definitions/retriever" + incremental_sync: "#/definitions/incremental_sync" + + semi_incremental_stream: + $ref: "#/definitions/incremental_stream" + retriever: "#/definitions/semi_incremental_retriever" + + # https://developer.atlassian.com/cloud/jira/software/rest/api-group-board/#api-rest-agile-1-0-board-boardid-issue-get + board_issues_stream: + $ref: "#/definitions/incremental_stream" + name: board_issues + primary_key: "id" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: SubstreamPartitionRouter + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/boards_stream" + parent_key: "id" + partition_field: "board_id" + requester: + $ref: "#/definitions/requester_v1" + request_parameters: + fields: "['key', 'created', 'updated']" + jql: "updated >= '{{ format_datetime(stream_slice.start_time, '%Y/%m/%d %H:%M') }}'" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [500] + action: IGNORE + transformations: + - type: AddFields + fields: + - path: ["boardId"] + value_type: integer + value: "{{ stream_slice.board_id }}" + - path: ["created"] + value_type: string + value: "{{ record.fields.created }}" + - path: ["updated"] + value_type: string + value: "{{ record.fields.updated }}" + $parameters: + path: "board/{{ stream_slice.board_id }}/issue" + extract_field: "issues" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-search/#api-rest-api-3-search-get + issues_stream: + $ref: "#/definitions/incremental_stream" + name: issues + primary_key: "id" + retriever: + $ref: "#/definitions/retriever" + partition_router: + type: CustomPartitionRouter + 
class_name: "source_jira.components.partition_routers.SubstreamOrSinglePartitionRouter" + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/projects_stream" + parent_key: "id" + partition_field: "project_id" + requester: + $ref: "#/definitions/requester" + request_parameters: + fields: "*all" + jql: > + "updated >= '{{ format_datetime(stream_slice.start_time, '%Y/%m/%d %H:%M') }}' " + "{{ ('and project in ' + '(' + stream_slice.project_id + ') ') if stream_slice.project_id else '' }}" + "ORDER BY updated asc" + expand: "renderedFields,transitions,changelog" + error_handler: + $ref: "#/definitions/error_handler" + response_filters: + - http_codes: [400] + error_message: "The user doesn't have permission to the project. Please grant the user to the project." + action: IGNORE + transformations: + - type: AddFields + fields: + - path: ["projectId"] + value_type: string + value: "{{ record.fields.project.id }}" + - path: ["projectKey"] + value_type: string + value: "{{ record.fields.project.key }}" + - path: ["created"] + value_type: string + value: "{{ record.fields.created }}" + - path: ["updated"] + value_type: string + value: "{{ record.fields.updated }}" + - type: CustomTransformation + class_name: "source_jira.components.transformations.RemoveEmptyFields" + field_pointers: ["renderedFields", fields] + + $parameters: + path: "search" + extract_field: "issues" + + # https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-fields/#api-rest-api-3-field-get + __story_points_issue_fields_substream: + $ref: "#/definitions/issue_fields_stream" + retriever: + $ref: "#/definitions/retriever_no_pagination_use_cache" + record_selector: + $ref: "#/definitions/selector" + record_filter: + condition: "{{ record.name in ['Story Points', 'Story point estimate'] }}" + + # https://developer.atlassian.com/cloud/jira/software/rest/api-group-sprint/#api-rest-agile-1-0-sprint-sprintid-issue-get + sprint_issues_stream: + $ref: "#/definitions/incremental_stream" + name: sprint_issues + primary_key: "id" + retriever: + $ref: "#/definitions/retriever_v1" + partition_router: + type: CustomPartitionRouter + class_name: "source_jira.components.partition_routers.SprintIssuesSubstreamPartitionRouter" + parent_stream_configs: + - type: ParentStreamConfig + stream: "#/definitions/__story_points_issue_fields_substream" + parent_key: "id" + partition_field: "field_id" + - type: ParentStreamConfig + stream: "#/definitions/sprints_stream" + parent_key: "id" + partition_field: "sprint_id" + requester: + $ref: "#/definitions/retriever_v1/requester" + request_parameters: + fields: "{{ stream_slice._partition.parent_stream_fields }}" + jql: "updated >= '{{ format_datetime(stream_slice.start_time, '%Y/%m/%d %H:%M') }}'" + transformations: + - type: AddFields + fields: + - path: ["issueId"] + value_type: string + value: "{{ record.id }}" + - path: ["id"] + value_type: string + value: "{{ stream_slice.sprint_id }}-{{ record.id }}" + - path: ["sprintId"] + value_type: integer + value: "{{ stream_slice.sprint_id }}" + - path: ["created"] + value_type: string + value: "{{ record.fields.created }}" + - path: ["updated"] + value_type: string + value: "{{ record.fields.updated }}" + $parameters: + path: "sprint/{{ stream_slice.sprint_id }}/issue" + extract_field: "issues" + +streams: + # Full refresh streams + + - "#/definitions/application_roles_stream" + - "#/definitions/avatars_stream" + - "#/definitions/boards_stream" + - "#/definitions/dashboards_stream" + - "#/definitions/filters_stream" 
+ - "#/definitions/groups_stream" + - "#/definitions/issue_fields_stream" + - "#/definitions/issue_field_configurations_stream" + - "#/definitions/issue_link_types_stream" + - "#/definitions/issue_navigator_settings_stream" + - "#/definitions/issue_notification_schemes_stream" + - "#/definitions/issue_priorities_stream" + - "#/definitions/issue_resolutions_stream" + - "#/definitions/issue_security_schemes_stream" + - "#/definitions/issue_types_stream" + - "#/definitions/issue_type_schemes_stream" + - "#/definitions/issue_type_screen_schemes_stream" + - "#/definitions/jira_settings_stream" + - "#/definitions/labels_stream" + - "#/definitions/permissions_stream" + - "#/definitions/permission_schemes_stream" + - "#/definitions/projects_stream" + - "#/definitions/project_categories_stream" + - "#/definitions/project_roles_stream" + - "#/definitions/project_types_stream" + - "#/definitions/screens_stream" + - "#/definitions/screen_schemes_stream" + - "#/definitions/time_tracking_stream" + - "#/definitions/users_stream" + - "#/definitions/workflows_stream" + - "#/definitions/workflow_schemes_stream" + - "#/definitions/workflow_statuses_stream" + - "#/definitions/workflow_status_categories_stream" + + # Substream based streams + + - "#/definitions/filter_sharing_stream" + - "#/definitions/issue_custom_field_contexts_stream" + - "#/definitions/issue_custom_field_options_stream" + - "#/definitions/issue_properties_stream" + - "#/definitions/issue_remote_links_stream" + - "#/definitions/issue_transitions_stream" + - "#/definitions/issue_votes_stream" + - "#/definitions/issue_watchers_stream" + - "#/definitions/project_avatars_stream" + - "#/definitions/project_components_stream" + - "#/definitions/project_email_stream" + - "#/definitions/project_permission_schemes_stream" + - "#/definitions/project_versions_stream" + - "#/definitions/screen_tabs_stream" + - "#/definitions/screen_tab_fields_stream" + - "#/definitions/sprints_stream" + - "#/definitions/users_groups_detailed_stream" + + # Incremental streams + + - "#/definitions/board_issues_stream" + - "#/definitions/issues_stream" + - "#/definitions/sprint_issues_stream" + +check: + type: CheckStream + stream_names: + - application_roles + - avatars + - boards + - dashboards + - filters + - groups + - issue_fields + - issue_field_configurations + - issue_link_types + - issue_navigator_settings + - issue_notification_schemes + - issue_priorities + - issue_resolutions + - issue_security_schemes + - issue_types + - issue_type_schemes + - issue_type_screen_schemes + - jira_settings + - labels + - permissions + - permission_schemes + - projects + - project_categories + - project_roles + - project_types + - screens + - screen_schemes + - time_tracking + - users + - workflows + - workflow_schemes + - workflow_statuses + - workflow_status_categories + - filter_sharing + - issue_custom_field_contexts + - issue_custom_field_options + - issue_properties + - issue_remote_links + - issue_transitions + - issue_votes + - issue_watchers + - project_avatars + - project_components + - project_email + - project_permission_schemes + - project_versions + - screen_tabs + - screen_tab_fields + - sprints + - users_groups_detailed + - board_issues + - issues + - sprint_issues diff --git a/airbyte-integrations/connectors/source-jira/source_jira/source.py b/airbyte-integrations/connectors/source-jira/source_jira/source.py index a34a74949070..5b4837c21e5b 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/source.py +++ 
b/airbyte-integrations/connectors/source-jira/source_jira/source.py @@ -1,148 +1,79 @@ # -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. # - -import logging -from typing import Any, List, Mapping, Optional, Tuple +from logging import Logger +from typing import Any, List, Mapping, Tuple import pendulum -import requests -from airbyte_cdk import AirbyteLogger from airbyte_cdk.models import FailureType -from airbyte_cdk.sources import AbstractSource -from airbyte_cdk.sources.streams import Stream +from airbyte_cdk.sources.declarative.exceptions import ReadException +from airbyte_cdk.sources.declarative.yaml_declarative_source import YamlDeclarativeSource +from airbyte_cdk.sources.streams.core import Stream from airbyte_cdk.sources.streams.http.auth import BasicHttpAuthenticator from airbyte_cdk.utils.traced_exception import AirbyteTracedException from pydantic.error_wrappers import ValidationError +from requests.exceptions import InvalidURL -from .streams import ( - ApplicationRoles, - Avatars, - BoardIssues, - Boards, - Dashboards, - Filters, - FilterSharing, - Groups, - IssueComments, - IssueCustomFieldContexts, - IssueCustomFieldOptions, - IssueFieldConfigurations, - IssueFields, - IssueLinkTypes, - IssueNavigatorSettings, - IssueNotificationSchemes, - IssuePriorities, - IssueProperties, - IssueRemoteLinks, - IssueResolutions, - Issues, - IssueSecuritySchemes, - IssueTransitions, - IssueTypes, - IssueTypeSchemes, - IssueTypeScreenSchemes, - IssueVotes, - IssueWatchers, - IssueWorklogs, - JiraSettings, - Labels, - Permissions, - PermissionSchemes, - ProjectAvatars, - ProjectCategories, - ProjectComponents, - ProjectEmail, - ProjectPermissionSchemes, - ProjectRoles, - Projects, - ProjectTypes, - ProjectVersions, - PullRequests, - Screens, - ScreenSchemes, - ScreenTabFields, - ScreenTabs, - SprintIssues, - Sprints, - TimeTracking, - Users, - UsersGroupsDetailed, - Workflows, - WorkflowSchemes, - WorkflowStatusCategories, - WorkflowStatuses, -) +from .streams import IssueComments, IssueFields, Issues, IssueWorklogs, PullRequests from .utils import read_full_refresh -logger = logging.getLogger("airbyte") - - -class SourceJira(AbstractSource): - def _validate_and_transform(self, config: Mapping[str, Any]): - start_date = config.get("start_date") - if start_date: - config["start_date"] = pendulum.parse(start_date) - config["lookback_window_minutes"] = pendulum.duration(minutes=config.get("lookback_window_minutes", 0)) - config["projects"] = config.get("projects", []) - return config - @staticmethod - def get_authenticator(config: Mapping[str, Any]): - return BasicHttpAuthenticator(config["email"], config["api_token"]) +class SourceJira(YamlDeclarativeSource): + def __init__(self): + super().__init__(**{"path_to_yaml": "manifest.yaml"}) - def check_connection(self, logger: AirbyteLogger, config: Mapping[str, Any]) -> Tuple[bool, Optional[Any]]: + def check_connection(self, logger: Logger, config: Mapping[str, Any]) -> Tuple[bool, Any]: try: - original_config = config.copy() - config = self._validate_and_transform(config) - authenticator = self.get_authenticator(config) - kwargs = {"authenticator": authenticator, "domain": config["domain"], "projects": config["projects"]} + streams = self.streams(config) + stream_name_to_stream = {s.name: s for s in streams} # check projects - projects_stream = Projects(**kwargs) - projects = {project["key"] for project in read_full_refresh(projects_stream)} - unknown_projects =
set(config["projects"]) - projects - if unknown_projects: - return False, "unknown project(s): " + ", ".join(unknown_projects) + if config.get("projects"): + projects_stream = stream_name_to_stream["projects"] + actual_projects = {project["key"] for project in read_full_refresh(projects_stream)} + unknown_projects = set(config["projects"]) - actual_projects + if unknown_projects: + return False, "unknown project(s): " + ", ".join(unknown_projects) # Get streams to check access to any of them - streams = self.streams(original_config) - for stream in streams: + for stream_name in self._source_config["check"]["stream_names"]: try: - next(read_full_refresh(stream), None) + next(read_full_refresh(stream_name_to_stream[stream_name]), None) except: - logger.warning("No access to stream: " + stream.name) + logger.warning(f"No access to stream: {stream_name}") else: - logger.info(f"API Token have access to stream: {stream.name}, so check is successful.") + logger.info(f"API Token have access to stream: {stream_name}, so check is successful.") return True, None return False, "This API Token does not have permission to read any of the resources." - except ValidationError as validation_error: - return False, validation_error - except requests.exceptions.RequestException as request_error: - has_response = request_error.response is not None - is_invalid_domain = ( - isinstance(request_error, requests.exceptions.InvalidURL) - or has_response - and request_error.response.status_code == requests.codes.not_found - ) - - if is_invalid_domain: + except ValidationError as e: + return False, e + except (ReadException, InvalidURL) as e: + if isinstance(e, InvalidURL) or "404" in str(e): raise AirbyteTracedException( message="Config validation error: please check that your domain is valid and does not include protocol (e.g: https://).", - internal_message=str(request_error), + internal_message=str(e), failure_type=FailureType.config_error, ) from None + raise e - # sometimes jira returns non json response - if has_response and request_error.response.headers.get("content-type") == "application/json": - message = " ".join(map(str, request_error.response.json().get("errorMessages", ""))) - return False, f"{message} {request_error}" + def streams(self, config: Mapping[str, Any]) -> List[Stream]: + streams = super().streams(config) + return streams + self.get_non_portable_streams(config=config) + + def _validate_and_transform_config(self, config: Mapping[str, Any]): + start_date = config.get("start_date") + if start_date: + config["start_date"] = pendulum.parse(start_date) + config["lookback_window_minutes"] = pendulum.duration(minutes=config.get("lookback_window_minutes", 0)) + config["projects"] = config.get("projects", []) + return config - # we don't know what this is, rethrow it - raise request_error + @staticmethod + def get_authenticator(config: Mapping[str, Any]): + return BasicHttpAuthenticator(config["email"], config["api_token"]) - def streams(self, config: Mapping[str, Any]) -> List[Stream]: - config = self._validate_and_transform(config) + def get_non_portable_streams(self, config: Mapping[str, Any]) -> List[Stream]: + config = self._validate_and_transform_config(config.copy()) authenticator = self.get_authenticator(config) args = {"authenticator": authenticator, "domain": config["domain"], "projects": config["projects"]} incremental_args = { @@ -152,65 +83,12 @@ def streams(self, config: Mapping[str, Any]) -> List[Stream]: } issues_stream = Issues(**incremental_args) issue_fields_stream = 
IssueFields(**args) + + streams = [IssueComments(**incremental_args), IssueWorklogs(**incremental_args)] + experimental_streams = [] if config.get("enable_experimental_streams", False): experimental_streams.append( PullRequests(issues_stream=issues_stream, issue_fields_stream=issue_fields_stream, **incremental_args) ) - return [ - ApplicationRoles(**args), - Avatars(**args), - Boards(**args), - BoardIssues(**incremental_args), - Dashboards(**args), - Filters(**args), - FilterSharing(**args), - Groups(**args), - issues_stream, - IssueComments(**incremental_args), - issue_fields_stream, - IssueFieldConfigurations(**args), - IssueCustomFieldContexts(**args), - IssueCustomFieldOptions(**args), - IssueLinkTypes(**args), - IssueNavigatorSettings(**args), - IssueNotificationSchemes(**args), - IssuePriorities(**args), - IssueProperties(**incremental_args), - IssueRemoteLinks(**incremental_args), - IssueResolutions(**args), - IssueSecuritySchemes(**args), - IssueTransitions(**args), - IssueTypeSchemes(**args), - IssueTypes(**args), - IssueTypeScreenSchemes(**args), - IssueVotes(**incremental_args), - IssueWatchers(**incremental_args), - IssueWorklogs(**incremental_args), - JiraSettings(**args), - Labels(**args), - Permissions(**args), - PermissionSchemes(**args), - Projects(**args), - ProjectRoles(**args), - ProjectAvatars(**args), - ProjectCategories(**args), - ProjectComponents(**args), - ProjectEmail(**args), - ProjectPermissionSchemes(**args), - ProjectTypes(**args), - ProjectVersions(**args), - Screens(**args), - ScreenTabs(**args), - ScreenTabFields(**args), - ScreenSchemes(**args), - Sprints(**args), - SprintIssues(**incremental_args), - TimeTracking(**args), - Users(**args), - UsersGroupsDetailed(**args), - Workflows(**args), - WorkflowSchemes(**args), - WorkflowStatuses(**args), - WorkflowStatusCategories(**args), - ] + experimental_streams + return streams + experimental_streams diff --git a/airbyte-integrations/connectors/source-jira/source_jira/streams.py b/airbyte-integrations/connectors/source-jira/source_jira/streams.py index 05c0d8381149..c2c4320e5107 100644 --- a/airbyte-integrations/connectors/source-jira/source_jira/streams.py +++ b/airbyte-integrations/connectors/source-jira/source_jira/streams.py @@ -195,215 +195,6 @@ def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: yield from super().stream_slices(**kwargs) -class ApplicationRoles(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-application-roles/#api-rest-api-3-applicationrole-get - """ - - primary_key = "key" - - def path(self, **kwargs) -> str: - return "applicationrole" - - -class Avatars(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-avatars/#api-rest-api-3-avatar-type-system-get - """ - - extract_field = "system" - avatar_types = ("issuetype", "project", "user") - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"avatar/{stream_slice['avatar_type']}/system" - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - for avatar_type in self.avatar_types: - yield {"avatar_type": avatar_type} - - -class Boards(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/software/rest/api-group-other-operations/#api-agile-1-0-board-get - """ - - extract_field = "values" - use_cache = True - api_v1 = True - - def path(self, **kwargs) -> str: - return "board" - - def read_records(self, **kwargs) -> Iterable[Mapping[str, Any]]: - for board in 
super().read_records(**kwargs): - location = board.get("location", {}) - if not self._projects or location.get("projectKey") in self._projects: - yield board - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - location = record.get("location") - if location: - record["projectId"] = str(location.get("projectId")) - record["projectKey"] = location.get("projectKey") - return record - - -class BoardIssues(StartDateJiraStream): - """ - https://developer.atlassian.com/cloud/jira/software/rest/api-group-board/#api-rest-agile-1-0-board-boardid-issue-get - """ - - cursor_field = "updated" - extract_field = "issues" - api_v1 = True - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self._starting_point_cache = {} - self.boards_stream = Boards(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"board/{stream_slice['board_id']}/issue" - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any], - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - params["fields"] = ["key", "created", "updated"] - jql = self.jql_compare_date(stream_state, stream_slice) - if jql: - params["jql"] = jql - return params - - def jql_compare_date(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any]) -> Optional[str]: - compare_date = self.get_starting_point(stream_state, stream_slice) - if compare_date: - compare_date = compare_date.strftime("%Y/%m/%d %H:%M") - return f"{self.cursor_field} >= '{compare_date}'" - - def _is_board_error(self, response): - """Check if board has error and should be skipped""" - if response.status_code == 500: - if "This board has no columns with a mapped status." in response.text: - return True - - def should_retry(self, response: requests.Response) -> bool: - if self._is_board_error(response): - return False - - # for all other HTTP errors the default handling is applied - return super().should_retry(response) - - def stream_slices(self, **kwargs) -> Iterable[Optional[Mapping[str, Any]]]: - yield from read_full_refresh(self.boards_stream) - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - try: - yield from super().read_records(stream_slice={"board_id": stream_slice["id"]}, **kwargs) - except HTTPError as e: - if self._is_board_error(e.response): - # Wrong board is skipped - self.logger.warning(f"Board {stream_slice['id']} has no columns with a mapped status. 
Skipping.") - else: - raise - - def get_updated_state(self, current_stream_state: MutableMapping[str, Any], latest_record: Mapping[str, Any]): - updated_state = latest_record[self.cursor_field] - board_id = str(latest_record["boardId"]) - stream_state_value = current_stream_state.get(board_id, {}).get(self.cursor_field) - if stream_state_value: - updated_state = max(updated_state, stream_state_value) - current_stream_state.setdefault(board_id, {})[self.cursor_field] = updated_state - return current_stream_state - - def get_starting_point(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any]) -> Optional[pendulum.DateTime]: - board_id = str(stream_slice["board_id"]) - if self.cursor_field not in self._starting_point_cache: - self._starting_point_cache.setdefault(board_id, {})[self.cursor_field] = self._get_starting_point( - stream_state=stream_state, stream_slice=stream_slice - ) - return self._starting_point_cache[board_id][self.cursor_field] - - def _get_starting_point(self, stream_state: Mapping[str, Any], stream_slice: Mapping[str, Any]) -> Optional[pendulum.DateTime]: - if stream_state: - board_id = str(stream_slice["board_id"]) - stream_state_value = stream_state.get(board_id, {}).get(self.cursor_field) - if stream_state_value: - stream_state_value = pendulum.parse(stream_state_value) - self._lookback_window_minutes - return safe_max(stream_state_value, self._start_date) - return self._start_date - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["boardId"] = stream_slice["board_id"] - record["created"] = record["fields"]["created"] - record["updated"] = record["fields"]["updated"] - return record - - -class Dashboards(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-dashboards/#api-rest-api-3-dashboard-get - """ - - extract_field = "dashboards" - - def path(self, **kwargs) -> str: - return "dashboard" - - -class Filters(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-filters/#api-rest-api-3-filter-search-get - """ - - extract_field = "values" - use_cache = True - - def path(self, **kwargs) -> str: - return "filter/search" - - def request_params(self, **kwargs) -> MutableMapping[str, Any]: - params = super().request_params(**kwargs) - params["expand"] = "description,owner,jql,viewUrl,searchUrl,favourite,favouritedCount,sharePermissions,isWritable,subscriptions" - return params - - -class FilterSharing(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-filter-sharing/#api-rest-api-3-filter-id-permission-get - """ - - def __init__(self, render_fields: bool = False, **kwargs): - super().__init__(**kwargs) - self.filters_stream = Filters(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"filter/{stream_slice['filter_id']}/permission" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for filters in read_full_refresh(self.filters_stream): - yield from super().read_records(stream_slice={"filter_id": filters["id"]}, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["filterId"] = stream_slice["filter_id"] - return record - - -class Groups(JiraStream): - """ - 
https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-groups/#api-rest-api-3-group-bulk-get - """ - - extract_field = "values" - primary_key = "groupId" - - def path(self, **kwargs) -> str: - return "group/bulk" - - class Issues(IncrementalJiraStream): """ https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-search/#api-rest-api-3-search-get @@ -479,41 +270,12 @@ def _get_custom_error(self, response: requests.Response) -> str: return "" -class IssueComments(IncrementalJiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-comments/#api-rest-api-3-issue-issueidorkey-comment-get - """ - - extract_field = "comments" - cursor_field = "updated" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.issues_stream = Issues( - authenticator=self.authenticator, - domain=self._domain, - projects=self._projects, - start_date=self._start_date, - ) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"issue/{stream_slice['key']}/comment" - - def read_records( - self, stream_slice: Optional[Mapping[str, Any]] = None, stream_state: Mapping[str, Any] = None, **kwargs - ) -> Iterable[Mapping[str, Any]]: - for issue in read_incremental(self.issues_stream, stream_state=stream_state): - stream_slice = {"key": issue["key"]} - yield from super().read_records(stream_slice=stream_slice, stream_state=stream_state, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["issueId"] = stream_slice["key"] - return record - - class IssueFields(JiraStream): """ https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-fields/#api-rest-api-3-field-get + + This stream is a dependency for the Issue stream, which in turn is a dependency for both the IssueComments and IssueWorklogs streams. + These latter streams cannot be migrated at the moment: https://github.com/airbytehq/airbyte-internal-issues/issues/7522 """ use_cache = True @@ -528,673 +290,142 @@ def field_ids_by_name(self) -> Mapping[str, List[str]]: return results -class IssueFieldConfigurations(JiraStream): +class Projects(JiraStream): """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-field-configurations/#api-rest-api-3-fieldconfiguration-get + https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-projects/#api-rest-api-3-project-search-get + + This stream is a dependency for the Issue stream, which in turn is a dependency for both the IssueComments and IssueWorklogs streams. 
+ These latter streams cannot be migrated at the moment: https://github.com/airbytehq/airbyte-internal-issues/issues/7522 """ extract_field = "values" + use_cache = True def path(self, **kwargs) -> str: - return "fieldconfiguration" + return "project/search" + + def request_params(self, **kwargs): + params = super().request_params(**kwargs) + params["expand"] = "description,lead" + params["status"] = ["live", "archived", "deleted"] + return params + + def read_records(self, **kwargs) -> Iterable[Mapping[str, Any]]: + for project in super().read_records(**kwargs): + if not self._projects or project["key"] in self._projects: + yield project -class IssueCustomFieldContexts(JiraStream): +class IssueWorklogs(IncrementalJiraStream): """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-custom-field-contexts/#api-rest-api-3-field-fieldid-context-get + https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-worklogs/#api-rest-api-3-issue-issueidorkey-worklog-get + + Cannot be migrated at the moment: https://github.com/airbytehq/airbyte-internal-issues/issues/7522 """ - use_cache = True - extract_field = "values" - skip_http_status_codes = [ - # https://community.developer.atlassian.com/t/get-custom-field-contexts-not-found-returned/48408/2 - # /rest/api/3/field/{fieldId}/context - can return 404 if project style is not "classic" - requests.codes.NOT_FOUND, - # Only Jira administrators can access custom field contexts. - requests.codes.FORBIDDEN, - requests.codes.BAD_REQUEST, - ] + extract_field = "worklogs" + cursor_field = "updated" def __init__(self, **kwargs): super().__init__(**kwargs) - self.issue_fields_stream = IssueFields(authenticator=self.authenticator, domain=self._domain, projects=self._projects) + self.issues_stream = Issues( + authenticator=self.authenticator, + domain=self._domain, + projects=self._projects, + start_date=self._start_date, + ) def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"field/{stream_slice['field_id']}/context" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for field in read_full_refresh(self.issue_fields_stream): - if field.get("custom", False): - yield from super().read_records( - stream_slice={"field_id": field["id"], "field_type": field.get("schema", {}).get("type")}, **kwargs - ) + return f"issue/{stream_slice['key']}/worklog" - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["fieldId"] = stream_slice["field_id"] - record["fieldType"] = stream_slice["field_type"] - return record + def read_records( + self, stream_slice: Optional[Mapping[str, Any]] = None, stream_state: Mapping[str, Any] = None, **kwargs + ) -> Iterable[Mapping[str, Any]]: + for issue in read_incremental(self.issues_stream, stream_state=stream_state): + stream_slice = {"key": issue["key"]} + yield from super().read_records(stream_slice=stream_slice, stream_state=stream_state, **kwargs) -class IssueCustomFieldOptions(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-custom-field-options/#api-rest-api-3-field-fieldid-context-contextid-option-get +class IssueComments(IncrementalJiraStream): """ + https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-comments/#api-rest-api-3-issue-issueidorkey-comment-get - skip_http_status_codes = [ - requests.codes.NOT_FOUND, - # Only Jira administrators can 
access custom field options. - requests.codes.FORBIDDEN, - requests.codes.BAD_REQUEST, - ] + Cannot be migrated at the moment: https://github.com/airbytehq/airbyte-internal-issues/issues/7522 + """ - extract_field = "values" + extract_field = "comments" + cursor_field = "updated" def __init__(self, **kwargs): super().__init__(**kwargs) - self.issue_custom_field_contexts_stream = IssueCustomFieldContexts( - authenticator=self.authenticator, domain=self._domain, projects=self._projects + self.issues_stream = Issues( + authenticator=self.authenticator, + domain=self._domain, + projects=self._projects, + start_date=self._start_date, ) def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"field/{stream_slice['field_id']}/context/{stream_slice['context_id']}/option" + return f"issue/{stream_slice['key']}/comment" - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for record in read_full_refresh(self.issue_custom_field_contexts_stream): - if record.get("fieldType") == "option": - yield from super().read_records(stream_slice={"field_id": record["fieldId"], "context_id": record["id"]}, **kwargs) + def read_records( + self, stream_slice: Optional[Mapping[str, Any]] = None, stream_state: Mapping[str, Any] = None, **kwargs + ) -> Iterable[Mapping[str, Any]]: + for issue in read_incremental(self.issues_stream, stream_state=stream_state): + stream_slice = {"key": issue["key"]} + yield from super().read_records(stream_slice=stream_slice, stream_state=stream_state, **kwargs) def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["fieldId"] = stream_slice["field_id"] - record["contextId"] = stream_slice["context_id"] + record["issueId"] = stream_slice["key"] return record -class IssueLinkTypes(JiraStream): +class PullRequests(IncrementalJiraStream): """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-link-types/#api-rest-api-3-issuelinktype-get + This stream uses an undocumented internal API endpoint used by the Jira + webapp. Jira does not publish any specifications about this endpoint, so the + only way to get details about it is to use a web browser, view a Jira issue + that has a linked pull request, and inspect the network requests using the + browser's developer console. """ - extract_field = "issueLinkTypes" - - def path(self, **kwargs) -> str: - return "issueLinkType" + cursor_field = "updated" + extract_field = "detail" + raise_on_http_errors = False + pr_regex = r"(?P<prDetails>PullRequestOverallDetails{openCount=(?P<open>[0-9]+), mergedCount=(?P<merged>[0-9]+), declinedCount=(?P<declined>[0-9]+)})|(?P<pr>pullrequest={dataType=pullrequest, state=(?P<state>[a-zA-Z]+), stateCount=(?P<count>[0-9]+)})" -class IssueNavigatorSettings(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-navigator-settings/#api-rest-api-3-settings-columns-get - """ + def __init__(self, issues_stream: Issues, issue_fields_stream: IssueFields, **kwargs): + super().__init__(**kwargs) + self.issues_stream = issues_stream + self.issue_fields_stream = issue_fields_stream - primary_key = None + @property + def url_base(self) -> str: + return f"https://{self._domain}/rest/dev-status/1.0/" def path(self, **kwargs) -> str: - return "settings/columns" + return "issue/detail" + # Currently, only GitHub pull requests are supported by this stream. The + # requirements for supporting other systems are unclear.
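+    # Illustrative sketch (not part of this class; `dev_field` is a hypothetical string
+    # holding an issue's development-status field value): the named groups of pr_regex
+    # can be used to decide whether an issue has any linked pull requests, e.g.
+    #   total = 0
+    #   for m in re.finditer(PullRequests.pr_regex, dev_field):
+    #       if m.group("prDetails"):  # aggregated open/merged/declined counts
+    #           total += int(m.group("open")) + int(m.group("merged")) + int(m.group("declined"))
+    #       elif m.group("pr"):  # a single pull request state entry
+    #           total += int(m.group("count"))
+    #   has_linked_prs = total > 0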
+ def request_params(self, stream_slice: Mapping[str, Any] = None, **kwargs): + params = super().request_params(stream_slice=stream_slice, **kwargs) + params["issueId"] = stream_slice["id"] + params["applicationType"] = "GitHub" + params["dataType"] = "branch" + return params -class IssueNotificationSchemes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-notification-schemes/#api-rest-api-3-notificationscheme-get - """ - - extract_field = "values" - - def path(self, **kwargs) -> str: - return "notificationscheme" - - -class IssuePriorities(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-priorities/#api-rest-api-3-priority-get - """ - - extract_field = "values" - - def path(self, **kwargs) -> str: - return "priority/search" - - -class IssuePropertyKeys(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-properties/#api-rest-api-3-issue-issueidorkey-properties-get - """ - - extract_field = "keys" - use_cache = True - skip_http_status_codes = [ - # Issue does not exist or you do not have permission to see it. - requests.codes.NOT_FOUND, - requests.codes.BAD_REQUEST, - ] - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - key = stream_slice["key"] - return f"issue/{key}/properties" - - def read_records(self, stream_slice: Mapping[str, Any], **kwargs) -> Iterable[Mapping[str, Any]]: - issue_key = stream_slice["key"] - yield from super().read_records(stream_slice={"key": issue_key}, **kwargs) - - -class IssueProperties(StartDateJiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-properties/#api-rest-api-3-issue-issueidorkey-properties-propertykey-get - """ - - primary_key = "key" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.issues_stream = Issues( - authenticator=self.authenticator, - domain=self._domain, - projects=self._projects, - start_date=self._start_date, - ) - self.issue_property_keys_stream = IssuePropertyKeys(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"issue/{stream_slice['issue_key']}/properties/{stream_slice['key']}" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for issue in read_full_refresh(self.issues_stream): - for property_key in self.issue_property_keys_stream.read_records(stream_slice={"key": issue["key"]}, **kwargs): - yield from super().read_records(stream_slice={"key": property_key["key"], "issue_key": issue["key"]}, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["issueId"] = stream_slice["issue_key"] - return record - - -class IssueRemoteLinks(StartDateJiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-remote-links/#api-rest-api-3-issue-issueidorkey-remotelink-get - """ - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.issues_stream = Issues( - authenticator=self.authenticator, - domain=self._domain, - projects=self._projects, - start_date=self._start_date, - ) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"issue/{stream_slice['key']}/remotelink" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, 
Any]]: - for issue in read_full_refresh(self.issues_stream): - yield from super().read_records(stream_slice={"key": issue["key"]}, **kwargs) - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["issueId"] = stream_slice["key"] - return record - - -class IssueResolutions(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-resolutions/#api-rest-api-3-resolution-search-get - """ - - extract_field = "values" - - def path(self, **kwargs) -> str: - return "resolution/search" - - -class IssueSecuritySchemes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-security-schemes/#api-rest-api-3-issuesecurityschemes-get - """ - - extract_field = "issueSecuritySchemes" - - def path(self, **kwargs) -> str: - return "issuesecurityschemes" - - -class IssueTypes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-types/#api-group-issue-types - """ - - def path(self, **kwargs) -> str: - return "issuetype" - - -class IssueTypeSchemes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-type-schemes/#api-rest-api-3-issuetypescheme-get - """ - - extract_field = "values" - - def path(self, **kwargs) -> str: - return "issuetypescheme" - - -class IssueTypeScreenSchemes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-type-screen-schemes/#api-rest-api-3-issuetypescreenscheme-get - """ - - extract_field = "values" - - def path(self, **kwargs) -> str: - return "issuetypescreenscheme" - - -class IssueTransitions(StartDateJiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issues/#api-rest-api-3-issue-issueidorkey-transitions-get - """ - - primary_key = ["issueId", "id"] - extract_field = "transitions" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.issues_stream = Issues( - authenticator=self.authenticator, - domain=self._domain, - projects=self._projects, - start_date=self._start_date, - ) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"issue/{stream_slice['key']}/transitions" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for issue in read_full_refresh(self.issues_stream): - yield from super().read_records(stream_slice={"key": issue["key"]}, **kwargs) - - def next_page_token(self, response: requests.Response) -> Optional[Mapping[str, Any]]: - return None - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["issueId"] = stream_slice["key"] - return record - - -class IssueVotes(StartDateJiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-votes/#api-rest-api-3-issue-issueidorkey-votes-get - - extract_field voters is commented, since it contains the - objects but does not contain information about exactly votes. The - original schema self, votes (number), hasVoted (bool) and list of voters. - The schema is correct but extract_field should not be applied. 
- """ - - # extract_field = "voters" - primary_key = None - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.issues_stream = Issues( - authenticator=self.authenticator, - domain=self._domain, - projects=self._projects, - start_date=self._start_date, - ) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"issue/{stream_slice['key']}/votes" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for issue in read_full_refresh(self.issues_stream): - yield from super().read_records(stream_slice={"key": issue["key"]}, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["issueId"] = stream_slice["key"] - return record - - -class IssueWatchers(StartDateJiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-watchers/#api-rest-api-3-issue-issueidorkey-watchers-get - - extract_field is commented for the same reason as issue_voters. - """ - - # extract_field = "watchers" - primary_key = None - skip_http_status_codes = [ - # Issue is not found or the user does not have permission to view it. - requests.codes.NOT_FOUND, - requests.codes.BAD_REQUEST, - ] - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.issues_stream = Issues( - authenticator=self.authenticator, - domain=self._domain, - projects=self._projects, - start_date=self._start_date, - ) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"issue/{stream_slice['key']}/watchers" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for issue in read_full_refresh(self.issues_stream): - yield from super().read_records(stream_slice={"key": issue["key"]}, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["issueId"] = stream_slice["key"] - return record - - -class IssueWorklogs(IncrementalJiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issue-worklogs/#api-rest-api-3-issue-issueidorkey-worklog-get - """ - - extract_field = "worklogs" - cursor_field = "updated" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.issues_stream = Issues( - authenticator=self.authenticator, - domain=self._domain, - projects=self._projects, - start_date=self._start_date, - ) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"issue/{stream_slice['key']}/worklog" - - def read_records( - self, stream_slice: Optional[Mapping[str, Any]] = None, stream_state: Mapping[str, Any] = None, **kwargs - ) -> Iterable[Mapping[str, Any]]: - for issue in read_incremental(self.issues_stream, stream_state=stream_state): - stream_slice = {"key": issue["key"]} - yield from super().read_records(stream_slice=stream_slice, stream_state=stream_state, **kwargs) - - -class JiraSettings(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-jira-settings/#api-rest-api-3-application-properties-get - """ - - def path(self, **kwargs) -> str: - return "application-properties" - - -class Labels(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-labels/#api-rest-api-3-label-get - """ - - extract_field = "values" - primary_key = "label" - - def path(self, **kwargs) -> str: - return 
"label" - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - return {"label": record} - - -class Permissions(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-permissions/#api-rest-api-3-permissions-get - """ - - extract_field = "permissions" - primary_key = "key" - - def path(self, **kwargs) -> str: - return "permissions" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_json = response.json() - records = response_json.get(self.extract_field, {}).values() - yield from records - - -class PermissionSchemes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-permission-schemes/#api-rest-api-3-permissionscheme-get - """ - - extract_field = "permissionSchemes" - - def path(self, **kwargs) -> str: - return "permissionscheme" - - -class Projects(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-projects/#api-rest-api-3-project-search-get - """ - - extract_field = "values" - use_cache = True - - def path(self, **kwargs) -> str: - return "project/search" - - def request_params(self, **kwargs): - params = super().request_params(**kwargs) - params["expand"] = "description,lead" - params["status"] = ["live", "archived", "deleted"] - return params - - def read_records(self, **kwargs) -> Iterable[Mapping[str, Any]]: - for project in super().read_records(**kwargs): - if not self._projects or project["key"] in self._projects: - yield project - - -class ProjectAvatars(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-avatars/#api-rest-api-3-project-projectidorkey-avatars-get - """ - - skip_http_status_codes = [ - # Project is not found or the user does not have permission to view the project. - requests.codes.UNAUTHORIZED, - requests.codes.NOT_FOUND, - ] - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.projects_stream = Projects(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"project/{stream_slice['key']}/avatars" - - def parse_response(self, response: requests.Response, **kwargs) -> Iterable[Mapping]: - response_json = response.json() - stream_slice = kwargs["stream_slice"] - for records in response_json.values(): - for record in records: - record["projectId"] = stream_slice["key"] - yield record - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for project in read_full_refresh(self.projects_stream): - yield from super().read_records(stream_slice={"key": project["key"]}, **kwargs) - - -class ProjectCategories(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-categories/#api-rest-api-3-projectcategory-get - """ - - skip_http_status_codes = [ - # Project is not found or the user does not have permission to view the project. 
- requests.codes.UNAUTHORIZED, - requests.codes.NOT_FOUND, - ] - - def path(self, **kwargs) -> str: - return "projectCategory" - - -class ProjectComponents(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-components/#api-rest-api-3-project-projectidorkey-component-get - """ - - extract_field = "values" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.projects_stream = Projects(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"project/{stream_slice['key']}/component" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for project in read_full_refresh(self.projects_stream): - yield from super().read_records(stream_slice={"key": project["key"]}, **kwargs) - - -class ProjectEmail(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-email/#api-rest-api-3-project-projectid-email-get - """ - - primary_key = "projectId" - skip_http_status_codes = [ - # You cannot edit the configuration of this project. - requests.codes.FORBIDDEN, - requests.codes.BAD_REQUEST, - ] - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.projects_stream = Projects(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"project/{stream_slice['project_id']}/email" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for project in read_full_refresh(self.projects_stream): - yield from super().read_records(stream_slice={"project_id": project["id"]}, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["projectId"] = stream_slice["project_id"] - return record - - -class ProjectPermissionSchemes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-permission-schemes/#api-rest-api-3-project-projectkeyorid-securitylevel-get - """ - - extract_field = "levels" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.projects_stream = Projects(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"project/{stream_slice['key']}/securitylevel" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for project in read_full_refresh(self.projects_stream): - yield from super().read_records(stream_slice={"key": project["key"]}, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["projectId"] = stream_slice["key"] - return record - - -class ProjectRoles(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-roles#api-rest-api-3-role-get - """ - - primary_key = "id" - - def path(self, **kwargs) -> str: - return "role" - - -class ProjectTypes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-types/#api-rest-api-3-project-type-get - """ - - primary_key = None - - def path(self, **kwargs) -> str: - return "project/type" - - -class 
ProjectVersions(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-project-versions/#api-rest-api-3-project-projectidorkey-version-get - """ - - extract_field = "values" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.projects_stream = Projects(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"project/{stream_slice['key']}/version" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for project in read_full_refresh(self.projects_stream): - yield from super().read_records(stream_slice={"key": project["key"]}, **kwargs) - - -class PullRequests(IncrementalJiraStream): - """ - This stream uses an undocumented internal API endpoint used by the Jira - webapp. Jira does not publish any specifications about this endpoint, so the - only way to get details about it is to use a web browser, view a Jira issue - that has a linked pull request, and inspect the network requests using the - browser's developer console. - """ - - cursor_field = "updated" - extract_field = "detail" - raise_on_http_errors = False - - pr_regex = r"(?PPullRequestOverallDetails{openCount=(?P[0-9]+), mergedCount=(?P[0-9]+), declinedCount=(?P[0-9]+)})|(?Ppullrequest={dataType=pullrequest, state=(?P[a-zA-Z]+), stateCount=(?P[0-9]+)})" - - def __init__(self, issues_stream: Issues, issue_fields_stream: IssueFields, **kwargs): - super().__init__(**kwargs) - self.issues_stream = issues_stream - self.issue_fields_stream = issue_fields_stream - - @property - def url_base(self) -> str: - return f"https://{self._domain}/rest/dev-status/1.0/" - - def path(self, **kwargs) -> str: - return "issue/detail" - - # Currently, only GitHub pull requests are supported by this stream. The - # requirements for supporting other systems are unclear. 
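As context for the implementation being removed here (and for the stream that replaces it), the undocumented dev-status endpoint can be exercised directly. A minimal sketch, assuming Jira Cloud basic auth; the domain, credentials, and issue id below are placeholders:

    import requests

    domain = "example.atlassian.net"  # placeholder
    response = requests.get(
        f"https://{domain}/rest/dev-status/1.0/issue/detail",
        params={"issueId": "10001", "applicationType": "GitHub", "dataType": "branch"},
        auth=("user@example.com", "api_token"),  # placeholder credentials
    )
    # The stream reads the "detail" key of the payload (see extract_field above).
    details = response.json().get("detail", [])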
- def request_params(self, stream_slice: Mapping[str, Any] = None, **kwargs): - params = super().request_params(stream_slice=stream_slice, **kwargs) - params["issueId"] = stream_slice["id"] - params["applicationType"] = "GitHub" - params["dataType"] = "branch" - return params - - def has_pull_requests(self, dev_field) -> bool: - if not dev_field or dev_field == "{}": - return False - matches = 0 - for match in re.finditer(self.pr_regex, dev_field, re.MULTILINE): - if match.group("prDetails"): - matches += int(match.group("open")) + int(match.group("merged")) + int(match.group("declined")) - elif match.group("pr"): - matches += int(match.group("count")) - return matches > 0 + def has_pull_requests(self, dev_field) -> bool: + if not dev_field or dev_field == "{}": + return False + matches = 0 + for match in re.finditer(self.pr_regex, dev_field, re.MULTILINE): + if match.group("prDetails"): + matches += int(match.group("open")) + int(match.group("merged")) + int(match.group("declined")) + elif match.group("pr"): + matches += int(match.group("count")) + return matches > 0 def read_records( self, stream_slice: Optional[Mapping[str, Any]] = None, stream_state: Mapping[str, Any] = None, **kwargs @@ -1213,275 +444,3 @@ def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, record["id"] = stream_slice["id"] record[self.cursor_field] = stream_slice[self.cursor_field] return record - - -class Screens(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-screens/#api-rest-api-3-screens-get - """ - - extract_field = "values" - use_cache = True - - def path(self, **kwargs) -> str: - return "screens" - - -class ScreenTabs(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-screen-tabs/#api-rest-api-3-screens-screenid-tabs-get - """ - - raise_on_http_errors = False - use_cache = True - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.screens_stream = Screens(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"screens/{stream_slice['screen_id']}/tabs" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for screen in read_full_refresh(self.screens_stream): - yield from self.read_tab_records(stream_slice={"screen_id": screen["id"]}, **kwargs) - - def read_tab_records(self, stream_slice: Mapping[str, Any], **kwargs) -> Iterable[Mapping[str, Any]]: - screen_id = stream_slice["screen_id"] - for screen_tab in super().read_records(stream_slice={"screen_id": screen_id}, **kwargs): - """ - For some projects jira creates screens automatically, which does not present in UI, but exist in screens stream. - We receive 400 error "Screen with id {screen_id} does not exist" for tabs by these screens. - """ - bad_request_reached = re.match(r"Screen with id \d* does not exist", screen_tab.get("errorMessages", [""])[0]) - if bad_request_reached: - self.logger.info("Could not get screen tab for %s screen id. 
Reason: %s", screen_id, screen_tab["errorMessages"][0]) - return - yield screen_tab - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["screenId"] = stream_slice["screen_id"] - return record - - -class ScreenTabFields(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-screen-tab-fields/#api-rest-api-3-screens-screenid-tabs-tabid-fields-get - """ - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.screens_stream = Screens(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - self.screen_tabs_stream = ScreenTabs(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"screens/{stream_slice['screen_id']}/tabs/{stream_slice['tab_id']}/fields" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for screen in read_full_refresh(self.screens_stream): - for tab in self.screen_tabs_stream.read_tab_records(stream_slice={"screen_id": screen["id"]}, **kwargs): - if "id" in tab: # Check for proper tab record since the ScreenTabs stream doesn't throw http errors - yield from super().read_records(stream_slice={"screen_id": screen["id"], "tab_id": tab["id"]}, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["screenId"] = stream_slice["screen_id"] - record["tabId"] = stream_slice["tab_id"] - return record - - -class ScreenSchemes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-screen-schemes/#api-rest-api-3-screenscheme-get - """ - - extract_field = "values" - - def path(self, **kwargs) -> str: - return "screenscheme" - - -class Sprints(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/software/rest/api-group-board/#api-rest-agile-1-0-board-boardid-sprint-get - """ - - extract_field = "values" - use_cache = True - api_v1 = True - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.boards_stream = Boards(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def _get_custom_error(self, response: requests.Response) -> str: - if response.status_code == requests.codes.BAD_REQUEST: - errors = response.json().get("errorMessages") - for error_message in errors: - if "The board does not support sprints" in error_message: - return ( - "The board does not support sprints. The board does not have a sprint board. if it's a team-managed one, " - "does it have sprints enabled under project settings? If it's a company-managed one," - " check that it has at least one Scrum board associated with it." 
- ) - return "" - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"board/{stream_slice['board_id']}/sprint" - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - available_board_types = ["scrum", "simple"] - for board in read_full_refresh(self.boards_stream): - if board["type"] in available_board_types: - board_details = {"name": board["name"], "id": board["id"]} - self.logger.info(f"Fetching sprints for board: {board_details}") - yield from super().read_records(stream_slice={"board_id": board["id"]}, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["boardId"] = stream_slice["board_id"] - return record - - -class SprintIssues(IncrementalJiraStream): - """ - https://developer.atlassian.com/cloud/jira/software/rest/api-group-sprint/#api-rest-agile-1-0-sprint-sprintid-issue-get - """ - - cursor_field = "updated" - extract_field = "issues" - api_v1 = True - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.sprints_stream = Sprints(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - self.issue_fields_stream = IssueFields(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return f"sprint/{stream_slice['sprint_id']}/issue" - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any], - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - params["fields"] = stream_slice["fields"] - jql = self.jql_compare_date(stream_state) - if jql: - params["jql"] = jql - return params - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - fields = self.get_fields() - for sprint in read_full_refresh(self.sprints_stream): - stream_slice = {"sprint_id": sprint["id"], "fields": fields} - yield from super().read_records(stream_slice=stream_slice, **kwargs) - - def transform(self, record: MutableMapping[str, Any], stream_slice: Mapping[str, Any], **kwargs) -> MutableMapping[str, Any]: - record["issueId"] = record["id"] - record["id"] = "-".join([str(stream_slice["sprint_id"]), record["id"]]) - record["sprintId"] = stream_slice["sprint_id"] - record["created"] = record["fields"]["created"] - record["updated"] = record["fields"]["updated"] - return record - - def get_fields(self): - fields = ["key", "status", "created", "updated"] - field_ids_by_name = self.issue_fields_stream.field_ids_by_name() - for name in ["Story Points", "Story point estimate"]: - if name in field_ids_by_name: - fields.extend(field_ids_by_name[name]) - return fields - - -class TimeTracking(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-time-tracking/#api-rest-api-3-configuration-timetracking-list-get - """ - - primary_key = "key" - - def path(self, **kwargs) -> str: - return "configuration/timetracking/list" - - -class Users(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-users/#api-rest-api-3-users-search-get - """ - - primary_key = "accountId" - use_cache = True - - def path(self, **kwargs) -> str: - return "users/search" - - -class 
UsersGroupsDetailed(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-users/#api-rest-api-3-user-get - """ - - primary_key = "accountId" - - def __init__(self, **kwargs): - super().__init__(**kwargs) - self.users_stream = Users(authenticator=self.authenticator, domain=self._domain, projects=self._projects) - - def path(self, stream_slice: Mapping[str, Any], **kwargs) -> str: - return "user" - - def request_params( - self, - stream_state: Mapping[str, Any], - stream_slice: Mapping[str, Any], - next_page_token: Optional[Mapping[str, Any]] = None, - ) -> MutableMapping[str, Any]: - params = super().request_params(stream_state=stream_state, stream_slice=stream_slice, next_page_token=next_page_token) - params["accountId"] = stream_slice["accountId"] - params["expand"] = "groups,applicationRoles" - return params - - def read_records(self, stream_slice: Optional[Mapping[str, Any]] = None, **kwargs) -> Iterable[Mapping[str, Any]]: - for user in read_full_refresh(self.users_stream): - yield from super().read_records(stream_slice={"accountId": user["accountId"]}, **kwargs) - - -class Workflows(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-workflows/#api-rest-api-3-workflow-search-get - """ - - extract_field = "values" - - def path(self, **kwargs) -> str: - return "workflow/search" - - -class WorkflowSchemes(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-workflow-schemes/#api-rest-api-3-workflowscheme-get - """ - - extract_field = "values" - - def path(self, **kwargs) -> str: - return "workflowscheme" - - -class WorkflowStatuses(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-workflow-statuses/#api-rest-api-3-status-get - """ - - def path(self, **kwargs) -> str: - return "status" - - -class WorkflowStatusCategories(JiraStream): - """ - https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-workflow-status-categories/#api-rest-api-3-statuscategory-get - """ - - def path(self, **kwargs) -> str: - return "statuscategory" diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/__init__.py b/airbyte-integrations/connectors/source-jira/unit_tests/__init__.py deleted file mode 100644 index c941b3045795..000000000000 --- a/airbyte-integrations/connectors/source-jira/unit_tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
-# diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/conftest.py b/airbyte-integrations/connectors/source-jira/unit_tests/conftest.py index 4421dbc4641f..fc6e1ff2cb8b 100644 --- a/airbyte-integrations/connectors/source-jira/unit_tests/conftest.py +++ b/airbyte-integrations/connectors/source-jira/unit_tests/conftest.py @@ -4,118 +4,30 @@ import json import os +from pathlib import Path import responses from pytest import fixture from responses import matchers -from source_jira.streams import ( - ApplicationRoles, - Avatars, - BoardIssues, - Boards, - Dashboards, - Filters, - FilterSharing, - Groups, - IssueComments, - IssueCustomFieldContexts, - IssueFieldConfigurations, - IssueFields, - IssueLinkTypes, - IssueNavigatorSettings, - IssueNotificationSchemes, - IssuePriorities, - IssuePropertyKeys, - IssueRemoteLinks, - IssueResolutions, - Issues, - IssueSecuritySchemes, - IssueTypeSchemes, - IssueVotes, - IssueWatchers, - IssueWorklogs, - JiraSettings, - Labels, - Permissions, - ProjectAvatars, - ProjectCategories, - ProjectComponents, - ProjectEmail, - ProjectPermissionSchemes, - Projects, - ProjectVersions, - Screens, - ScreenTabs, - SprintIssues, - Sprints, - TimeTracking, - Users, - UsersGroupsDetailed, - Workflows, - WorkflowSchemes, - WorkflowStatusCategories, - WorkflowStatuses, -) - - -@fixture(scope="session", autouse=True) -def disable_cache(): - classes = [ - ApplicationRoles, - Avatars, - BoardIssues, - Boards, - Dashboards, - Filters, - FilterSharing, - Groups, - IssueComments, - IssueCustomFieldContexts, - IssueFieldConfigurations, - IssueFields, - IssueLinkTypes, - IssueNavigatorSettings, - IssueNotificationSchemes, - IssuePriorities, - IssuePropertyKeys, - IssueRemoteLinks, - IssueResolutions, - Issues, - IssueSecuritySchemes, - IssueTypeSchemes, - IssueVotes, - IssueWatchers, - IssueWorklogs, - JiraSettings, - Labels, - Permissions, - ProjectAvatars, - ProjectCategories, - ProjectComponents, - ProjectEmail, - ProjectPermissionSchemes, - Projects, - ProjectVersions, - Screens, - ScreenTabs, - SprintIssues, - Sprints, - TimeTracking, - Users, - UsersGroupsDetailed, - Workflows, - WorkflowSchemes, - WorkflowStatusCategories, - WorkflowStatuses, - ] - for cls in classes: - # Disabling cache for all streams to assess the number of calls made for each stream. - # Additionally, this is necessary as the responses library has been returning unexpected call counts - # following the recent update to HttpStream - cls.use_cache = False - - -os.environ["REQUEST_CACHE_PATH"] = "REQUEST_CACHE_PATH" +from source_jira.source import SourceJira + +ENV_REQUEST_CACHE_PATH = "REQUEST_CACHE_PATH" +os.environ["REQUEST_CACHE_PATH"] = ENV_REQUEST_CACHE_PATH + + +def delete_cache_files(cache_directory): + directory_path = Path(cache_directory) + if directory_path.exists() and directory_path.is_dir(): + for file_path in directory_path.glob("*.sqlite"): + file_path.unlink() + + +@fixture(autouse=True) +def clear_cache_before_each_test(): + # The problem: Once the first request is cached, we will keep getting the cached result no matter what setup we prepared for a particular test. + # Solution: We must delete the cache before each test because for the same URL, we want to define multiple responses and status codes. 
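+    # The cache being cleared is the requests_cache sqlite backend that the
+    # CDK's HttpStream writes under REQUEST_CACHE_PATH, which is why removing
+    # the *.sqlite files (see delete_cache_files above) fully resets it.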
+ delete_cache_files(os.getenv(ENV_REQUEST_CACHE_PATH)) + yield @fixture @@ -375,7 +287,6 @@ def projects_versions_response(): @fixture def mock_projects_responses(config, projects_response): - Projects.use_cache = False responses.add( responses.GET, f"https://{config['domain']}/rest/api/3/project/search?maxResults=50&expand=description%2Clead&status=live&status=archived&status=deleted", @@ -385,7 +296,6 @@ def mock_projects_responses(config, projects_response): @fixture def mock_projects_responses_additional_project(config, projects_response): - Projects.use_cache = False projects_response["values"] += [{"id": "3", "key": "Project3"}, {"id": "4", "key": "Project4"}] responses.add( responses.GET, @@ -395,7 +305,7 @@ def mock_projects_responses_additional_project(config, projects_response): @fixture -def mock_issues_responses(config, issues_response): +def mock_issues_responses_with_date_filter(config, issues_response): responses.add( responses.GET, f"https://{config['domain']}/rest/api/3/search", @@ -404,7 +314,7 @@ def mock_issues_responses(config, issues_response): { "maxResults": 50, "fields": "*all", - "jql": "project in (1) ORDER BY updated asc", + "jql": "updated >= '2021/01/01 00:00' and project in (1) ORDER BY updated asc", "expand": "renderedFields,transitions,changelog", } ) @@ -419,7 +329,7 @@ def mock_issues_responses(config, issues_response): { "maxResults": 50, "fields": "*all", - "jql": "project in (2) ORDER BY updated asc", + "jql": "updated >= '2021/01/01 00:00' and project in (2) ORDER BY updated asc", "expand": "renderedFields,transitions,changelog", } ) @@ -434,7 +344,7 @@ def mock_issues_responses(config, issues_response): { "maxResults": 50, "fields": "*all", - "jql": "project in (3) ORDER BY updated asc", + "jql": "updated >= '2021/01/01 00:00' and project in (3) ORDER BY updated asc", "expand": "renderedFields,transitions,changelog", } ) @@ -450,7 +360,7 @@ def mock_issues_responses(config, issues_response): { "maxResults": 50, "fields": "*all", - "jql": "project in (4) ORDER BY updated asc", + "jql": "updated >= '2021/01/01 00:00' and project in (4) ORDER BY updated asc", "expand": "renderedFields,transitions,changelog", } ) @@ -477,23 +387,23 @@ def mock_issues_responses(config, issues_response): def mock_project_emails(config, project_email_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/project/1/email?maxResults=50", + f"https://{config['domain']}/rest/api/3/project/1/email", json=project_email_response, ) responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/project/2/email?maxResults=50", + f"https://{config['domain']}/rest/api/3/project/2/email", json=project_email_response, ) responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/project/3/email?maxResults=50", + f"https://{config['domain']}/rest/api/3/project/3/email", json={"errorMessages": ["No access to emails for project 3"]}, status=403, ) responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/project/4/email?maxResults=50", + f"https://{config['domain']}/rest/api/3/project/4/email", json=project_email_response, ) @@ -502,12 +412,12 @@ def mock_project_emails(config, project_email_response): def mock_issue_watchers_responses(config, issue_watchers_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/watchers?maxResults=50", + f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/watchers", json=issue_watchers_response, ) responses.add( responses.GET, - 
f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-2/watchers?maxResults=50", + f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-2/watchers", json={"errorMessages": ["Not found watchers for issue TESTKEY13-2"]}, status=404, ) @@ -567,7 +477,7 @@ def mock_issue_custom_field_options_response(config, issue_custom_field_options_ def mock_fields_response(config, issue_fields_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/field?maxResults=50", + f"https://{config['domain']}/rest/api/3/field", json=issue_fields_response, ) @@ -625,3 +535,10 @@ def mock_sprints_response(config, sprints_response): f"https://{config['domain']}/rest/agile/1.0/board/3/sprint?maxResults=50", json=sprints_response, ) + + +def find_stream(stream_name, config): + for stream in SourceJira().streams(config=config): + if stream.name == stream_name: + return stream + raise ValueError(f"Stream {stream_name} not found") diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/responses/issue_fields.json b/airbyte-integrations/connectors/source-jira/unit_tests/responses/issue_fields.json index 37ac23f16ab3..24acf71e024d 100644 --- a/airbyte-integrations/connectors/source-jira/unit_tests/responses/issue_fields.json +++ b/airbyte-integrations/connectors/source-jira/unit_tests/responses/issue_fields.json @@ -64,5 +64,19 @@ "type": "option", "system": "issuetype" } + }, + { + "id": "PrIssueId", + "key": "PrIssueKey", + "name": "Development", + "custom": false, + "orderable": true, + "navigable": true, + "searchable": true, + "clauseNames": ["issuetype", "type"], + "schema": { + "type": "option", + "system": "issuetype" + } } ] diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/responses/issues.json b/airbyte-integrations/connectors/source-jira/unit_tests/responses/issues.json index 3ece380f3da0..59596102b15e 100644 --- a/airbyte-integrations/connectors/source-jira/unit_tests/responses/issues.json +++ b/airbyte-integrations/connectors/source-jira/unit_tests/responses/issues.json @@ -241,7 +241,8 @@ "maxResults": 20, "total": 0, "worklogs": [] - } + }, + "PrIssueId": "PullRequestOverallDetails{openCount=1, mergedCount=1, declinedCount=1}" }, "projectId": "10016", "projectKey": "TESTKEY13", diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/test_components.py b/airbyte-integrations/connectors/source-jira/unit_tests/test_components.py new file mode 100644 index 000000000000..89258a55bc35 --- /dev/null +++ b/airbyte-integrations/connectors/source-jira/unit_tests/test_components.py @@ -0,0 +1,121 @@ +# Copyright (c) 2024 Airbyte, Inc., all rights reserved. 
+ +from unittest.mock import MagicMock + +import pytest +import requests +from airbyte_cdk.sources.declarative.types import StreamSlice +from source_jira.components.extractors import LabelsRecordExtractor +from source_jira.components.partition_routers import SprintIssuesSubstreamPartitionRouter, SubstreamPartitionRouterWithContext + + +@pytest.mark.parametrize( + "json_response, expected_output", + [ + ({"values": ["label 1", "label 2", "label 3"]}, [{"label": "label 1"}, {"label": "label 2"}, {"label": "label 3"}]), + (["label 1", "label 2", "label 3"], []), + ([], []), # Testing an empty response + ], +) +def test_labels_record_extractor(json_response, expected_output): + # Create the extractor instance directly in the test function + extractor = LabelsRecordExtractor(["values"], {}, {}) + # Set up the mocked response + response_mock = MagicMock(spec=requests.Response) + response_mock.json.return_value = json_response # Parameterized JSON response + + # Call the extract_records to process the mocked response + extracted = extractor.extract_records(response_mock) + + # Assert to check if the output matches the expected result + assert extracted == expected_output, "The extracted records do not match the expected output" + + +@pytest.mark.parametrize( + "records, expected_slices", + [ + # No records in parent stream + ([], []), + # Valid records generating stream slices, including checking parent records + ( + [{"id": 1, "parent_id": 100}, {"id": 2, "parent_id": 200}], + [ + {"partition": {"partition_id": 100, "parent_slice": {}}, "parent_record": {"id": 1, "parent_id": 100}}, + {"partition": {"partition_id": 200, "parent_slice": {}}, "parent_record": {"id": 2, "parent_id": 200}}, + ], + ), + ], +) +def test_stream_slices(records, expected_slices): + # Mock configuration and parent stream config + config = MagicMock() + parent_stream = MagicMock() + parent_key = MagicMock() + partition_field = MagicMock() + + parent_key.eval.return_value = "parent_id" + partition_field.eval.return_value = "partition_id" + + parent_stream_config = MagicMock() + parent_stream_config.stream = parent_stream + parent_stream_config.parent_key = parent_key + parent_stream_config.partition_field = partition_field + + # Initialize the router instance + router = SubstreamPartitionRouterWithContext(parent_stream_configs=[parent_stream_config], config=config, parameters={}) + + # Mocking parent stream's stream_slices and read_records + parent_stream.stream_slices.return_value = [{}] + parent_stream.read_records.return_value = records + + slices = list(router.stream_slices()) + + # Preparing the output for assertion + output = [{"partition": slice.partition, "parent_record": getattr(slice, "parent_record", None)} for slice in slices] + + assert output == expected_slices, f"Expected {expected_slices} but got {output}" + + +@pytest.mark.parametrize( + "fields_data, other_data, expected_fields, expected_partition", + [ + # Test case with one field value and one record from other parent stream + ( + [{"partition_id": "field_value"}], # fields parent stream output + [{"partition_id": "other_value"}], # other parent stream output + ["field_value"], # expected fields value in stream slices + "other_value", # expected partition value in stream slices + ) + ], +) +def test_sprint_issues_substream_partition_router(fields_data, other_data, expected_fields, expected_partition): + fields_parent_stream = MagicMock() + fields_parent_stream_config = MagicMock(stream=fields_parent_stream, partition_field=MagicMock()) + 
fields_parent_stream_config.partition_field.eval.return_value = "partition_id" + other_parent_stream = MagicMock() + other_parent_stream_config = MagicMock(stream=other_parent_stream, partition_field=MagicMock()) + other_parent_stream_config.partition_field.eval.return_value = "partition_id" + + # Initialize the router inside the test + router = SprintIssuesSubstreamPartitionRouter( + parent_stream_configs=[fields_parent_stream_config, other_parent_stream_config], config={}, parameters={} + ) + + # Mocking fields_parent_stream to return specific stream slices + fields_parent_stream.stream_slices.return_value = [StreamSlice(partition={"partition_id": val}, cursor_slice={}) for val in fields_data] + fields_parent_stream.read_records.return_value = [{"id": 1, "partition_id": val} for val in fields_data] + + # Mocking other_parent_stream to return specific stream slices + other_parent_stream.stream_slices.return_value = [StreamSlice(partition={"partition_id": val}, cursor_slice={}) for val in other_data] + other_parent_stream.read_records.return_value = [{"id": 2, "partition_id": val} for val in other_data] + + # Collecting results from stream_slices + slices = list(router.stream_slices()) + + # Asserting the correct parent stream fields are set in slices + assert all( + slice.parent_stream_fields == expected_fields for slice in slices + ), f"Expected parent stream fields {expected_fields}, but got {slice.parent_stream_fields}" + assert all( + slice.partition["partition_id"] == expected_partition for slice in slices + ), f"Expected partition ID {expected_partition}, but got {slice.partition['partition_id']}" diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/test_date_time_transformer.py b/airbyte-integrations/connectors/source-jira/unit_tests/test_date_time_transformer.py index b0bbe6f80b1c..7c9fbfff90ba 100644 --- a/airbyte-integrations/connectors/source-jira/unit_tests/test_date_time_transformer.py +++ b/airbyte-integrations/connectors/source-jira/unit_tests/test_date_time_transformer.py @@ -3,8 +3,7 @@ # import pytest -from source_jira.source import SourceJira -from source_jira.streams import ApplicationRoles +from conftest import find_stream @pytest.mark.parametrize( @@ -18,9 +17,7 @@ ], ) def test_converting_date_to_date_time(origin_item, sub_schema, expected, config): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ApplicationRoles(**args) + stream = find_stream("issue_comments", config) actual = stream.transformer.default_convert(origin_item, sub_schema) assert actual == expected @@ -28,9 +25,7 @@ def test_converting_date_to_date_time(origin_item, sub_schema, expected, config) def test_converting_date_with_incorrect_format_returning_original_value(config, caplog): sub_schema = {"type": "string", "format": "date-time"} incorrectly_formatted_date = "incorrectly_formatted_date" - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ApplicationRoles(**args) + stream = find_stream("issue_comments", config) actual = stream.transformer.default_convert(incorrectly_formatted_date, sub_schema) assert actual == incorrectly_formatted_date assert f"{incorrectly_formatted_date}: doesn't match expected format." 
in caplog.text diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/test_migrations/test_config.json b/airbyte-integrations/connectors/source-jira/unit_tests/test_migrations/test_config.json deleted file mode 100644 index 8745a889bf96..000000000000 --- a/airbyte-integrations/connectors/source-jira/unit_tests/test_migrations/test_config.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "api_token": "invalid_token", - "domain": "invaliddomain.atlassian.net", - "email": "no-reply@domain.com", - "start_date": "2023-01-01T00:00:00Z", - "projects": ["IT1", "IT1", "IT1"], - "expand_issue_changelog": true, - "render_fields": true, - "expand_issue_transition": false -} diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/test_migrations/test_config_migrations.py b/airbyte-integrations/connectors/source-jira/unit_tests/test_migrations/test_config_migrations.py deleted file mode 100644 index 2b124e0e6323..000000000000 --- a/airbyte-integrations/connectors/source-jira/unit_tests/test_migrations/test_config_migrations.py +++ /dev/null @@ -1,68 +0,0 @@ -# -# Copyright (c) 2023 Airbyte, Inc., all rights reserved. -# - - -import json -import os -from typing import Any, Mapping - -from airbyte_cdk.models import OrchestratorType, Type -from airbyte_cdk.sources import Source -from source_jira.config_migrations import MigrateIssueExpandProperties -from source_jira.source import SourceJira - -# BASE ARGS -CMD = "check" -TEST_CONFIG_PATH = f"{os.path.dirname(__file__)}/test_config.json" -SOURCE_INPUT_ARGS = [CMD, "--config", TEST_CONFIG_PATH] -SOURCE: Source = SourceJira() - - -# HELPERS -def load_config(config_path: str = TEST_CONFIG_PATH) -> Mapping[str, Any]: - with open(config_path, "r") as config: - return json.load(config) - - -def revert_migration(config_path: str = TEST_CONFIG_PATH) -> None: - with open(config_path, "r") as test_config: - config = json.load(test_config) - config.pop("issues_stream_expand_with") - with open(config_path, "w") as updated_config: - config = json.dumps(config) - updated_config.write(config) - - -def test_migrate_config(): - migration_instance = MigrateIssueExpandProperties() - # migrate the test_config - migration_instance.migrate(SOURCE_INPUT_ARGS, SOURCE) - # load the updated config - test_migrated_config = load_config() - # check migrated property - assert "issues_stream_expand_with" in test_migrated_config - assert isinstance(test_migrated_config["issues_stream_expand_with"], list) - # check the old property is in place - assert all(key in test_migrated_config for key in migration_instance.migrate_from_keys_map) - assert all(isinstance(test_migrated_config[key], bool) for key in migration_instance.migrate_from_keys_map) - # check the migration should be skipped, once already done - assert not migration_instance.should_migrate(test_migrated_config) - # test CONTROL MESSAGE was emitted - control_msg = migration_instance.message_repository._message_queue[0] - assert control_msg.type == Type.CONTROL - assert control_msg.control.type == OrchestratorType.CONNECTOR_CONFIG - # check the migrated values - assert control_msg.control.connectorConfig.config["issues_stream_expand_with"] == ["changelog", "renderedFields"] - # revert the test_config to the starting point - revert_migration() - - -def test_config_is_reverted(): - # check the test_config state, it has to be the same as before tests - test_config = load_config() - # check the config no longer has the migrated property - assert "issues_stream_expand_with" not in test_config - # check the old property 
is still there - assert all(key in test_config for key in MigrateIssueExpandProperties.migrate_from_keys_map) - assert all(isinstance(test_config[key], bool) for key in MigrateIssueExpandProperties.migrate_from_keys_map) diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/test_pagination.py b/airbyte-integrations/connectors/source-jira/unit_tests/test_pagination.py index bd9d2d65dd07..4b954ac8dfec 100644 --- a/airbyte-integrations/connectors/source-jira/unit_tests/test_pagination.py +++ b/airbyte-integrations/connectors/source-jira/unit_tests/test_pagination.py @@ -7,7 +7,8 @@ from http import HTTPStatus import responses -from source_jira.streams import Issues, Projects, Users +from conftest import find_stream +from source_jira.streams import Issues, Projects from source_jira.utils import read_full_refresh @@ -95,8 +96,9 @@ def test_pagination_issues(): @responses.activate -def test_pagination_users(): +def test_pagination_users(config): domain = "domain.com" + config["domain"] = domain responses_json = [ (HTTPStatus.OK, {}, json.dumps([{"self": "user1"}, {"self": "user2"}])), (HTTPStatus.OK, {}, json.dumps([{"self": "user3"}, {"self": "user4"}])), @@ -110,13 +112,16 @@ def test_pagination_users(): content_type="application/json", ) - stream = Users(authenticator=None, domain=domain, projects=[]) - stream.page_size = 2 + stream = find_stream("users", config) + stream.retriever.paginator.pagination_strategy.page_size = 2 records = list(read_full_refresh(stream)) - assert records == [ + expected_records = [ {"self": "user1"}, {"self": "user2"}, {"self": "user3"}, {"self": "user4"}, {"self": "user5"}, ] + + for rec, exp in zip(records, expected_records): + assert dict(rec) == exp, f"Failed at {rec} vs {exp}" diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/test_source.py b/airbyte-integrations/connectors/source-jira/unit_tests/test_source.py index 4cec82b00478..5b427df66b66 100644 --- a/airbyte-integrations/connectors/source-jira/unit_tests/test_source.py +++ b/airbyte-integrations/connectors/source-jira/unit_tests/test_source.py @@ -27,12 +27,12 @@ def test_check_connection_config_no_access_to_one_stream(config, caplog, project ) responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/applicationrole?maxResults=50", + f"https://{config['domain']}/rest/api/3/applicationrole", status=401, ) responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/avatar/issuetype/system?maxResults=50", + f"https://{config['domain']}/rest/api/3/avatar/issuetype/system", json=avatars_response, ) responses.add(responses.GET, f"https://{config['domain']}/rest/api/3/label?maxResults=50", status=401) diff --git a/airbyte-integrations/connectors/source-jira/unit_tests/test_streams.py b/airbyte-integrations/connectors/source-jira/unit_tests/test_streams.py index 00675fa25ab1..9ba636fc5ff8 100644 --- a/airbyte-integrations/connectors/source-jira/unit_tests/test_streams.py +++ b/airbyte-integrations/connectors/source-jira/unit_tests/test_streams.py @@ -2,123 +2,71 @@ # Copyright (c) 2023 Airbyte, Inc., all rights reserved. 
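# Note: the rewritten tests below share one pattern: resolve the declarative
# stream by name and drive it with the generic read helpers, e.g.
#     stream = find_stream("application_roles", config)
#     records = list(read_full_refresh(stream))
# instead of instantiating the old Python stream classes directly.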
# -import logging +import re import pendulum import pytest -import requests import responses -from airbyte_cdk.models import SyncMode -from requests.exceptions import HTTPError -from responses import matchers +from airbyte_cdk.sources.declarative.exceptions import ReadException +from conftest import find_stream from source_jira.source import SourceJira -from source_jira.streams import ( - ApplicationRoles, - Avatars, - BoardIssues, - Boards, - Dashboards, - Filters, - FilterSharing, - Groups, - IssueComments, - IssueCustomFieldContexts, - IssueCustomFieldOptions, - IssueFieldConfigurations, - IssueFields, - IssueLinkTypes, - IssueNavigatorSettings, - IssueNotificationSchemes, - IssuePriorities, - IssuePropertyKeys, - IssueRemoteLinks, - IssueResolutions, - Issues, - IssueSecuritySchemes, - IssueTypeSchemes, - IssueVotes, - IssueWatchers, - IssueWorklogs, - JiraSettings, - Labels, - Permissions, - ProjectAvatars, - ProjectCategories, - ProjectComponents, - ProjectEmail, - ProjectPermissionSchemes, - Projects, - ProjectVersions, - Screens, - ScreenTabs, - SprintIssues, - Sprints, - TimeTracking, - Users, - UsersGroupsDetailed, - Workflows, - WorkflowSchemes, - WorkflowStatusCategories, - WorkflowStatuses, -) -from source_jira.utils import read_full_refresh +from source_jira.streams import IssueFields, Issues, PullRequests +from source_jira.utils import read_full_refresh, read_incremental @responses.activate def test_application_roles_stream_401_error(config, caplog): config["domain"] = "test_application_domain" - responses.add(responses.GET, f"https://{config['domain']}/rest/api/3/applicationrole?maxResults=50", status=401) + responses.add(responses.GET, f"https://{config['domain']}/rest/api/3/applicationrole", status=401) authenticator = SourceJira().get_authenticator(config=config) args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ApplicationRoles(**args) - - is_available, reason = stream.check_availability(logger=logging.Logger, source=SourceJira()) + stream = find_stream("application_roles", config) - assert is_available is False - - assert reason == ( - "Unable to read application_roles stream. The endpoint https://test_application_domain/rest/api/3/applicationrole?maxResults=50 returned 401: Unauthorized. Invalid creds were provided, please check your api token, domain and/or email.. Please visit https://docs.airbyte.com/integrations/sources/jira to learn more. 
" - ) + with pytest.raises( + ReadException, + match="Request to https://test_application_domain/rest/api/3/applicationrole failed with status code 401 and error message None", + ): + list(read_full_refresh(stream)) @responses.activate def test_application_roles_stream(config, application_roles_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/applicationrole?maxResults=50", + f"https://{config['domain']}/rest/api/3/applicationrole", json=application_roles_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ApplicationRoles(**args) + stream = find_stream("application_roles", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 1 assert len(responses.calls) == 1 @responses.activate def test_application_roles_stream_http_error(config, application_roles_response): - responses.add( - responses.GET, f"https://{config['domain']}/rest/api/3/applicationrole?maxResults=50", json={"error": "not found"}, status=404 - ) + responses.add(responses.GET, f"https://{config['domain']}/rest/api/3/applicationrole", json={"error": "not found"}, status=404) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ApplicationRoles(**args) - with pytest.raises(HTTPError): - [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("application_roles", config) + with pytest.raises( + ReadException, match="Request to https://domain/rest/api/3/applicationrole failed with status code 404 and error message not found" + ): + list(read_full_refresh(stream)) @responses.activate -def test_boards_stream(config, mock_board_response): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Boards(**args) +def test_boards_stream(config, boards_response): + responses.add( + responses.GET, + f"https://{config['domain']}/rest/agile/1.0/board?maxResults=50", + json=boards_response, + ) + + stream = find_stream("boards", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 3 assert len(responses.calls) == 1 @@ -132,48 +80,40 @@ def test_board_stream_forbidden(config, boards_response, caplog): json={"error": f"403 Client Error: Forbidden for url: https://{config['domain']}/rest/agile/1.0/board?maxResults=50"}, status=403, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Boards(**args) - is_available, reason = stream.check_availability(logger=logging.Logger, source=SourceJira()) + stream = find_stream("boards", config) - assert is_available is False + expected_url = "https://test_boards_domain/rest/agile/1.0/board?maxResults=50" + escaped_url = re.escape(expected_url) - assert reason == ( - "Unable to read boards stream. The endpoint " - "https://test_boards_domain/rest/agile/1.0/board?maxResults=50 returned 403: " - "Forbidden. 
Please check the 'READ' permission(Scopes for Connect apps) " - "and/or the user has Jira Software rights and access.. Please visit " - "https://docs.airbyte.com/integrations/sources/jira to learn more. " - "403 Client Error: Forbidden for url: " - "https://test_boards_domain/rest/agile/1.0/board?maxResults=50" - ) + with pytest.raises( + ReadException, + match=( + f"Request to {escaped_url} failed with status code 403 and error message 403 Client Error: " f"Forbidden for url: {escaped_url}" + ), + ): + list(read_full_refresh(stream)) @responses.activate def test_dashboards_stream(config, dashboards_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/dashboard?maxResults=50", + f"https://{config['domain']}/rest/api/3/dashboard", json=dashboards_response, ) + + stream = find_stream("dashboards", config) + records = list(read_full_refresh(stream)) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Dashboards(**args) - - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 2 assert len(responses.calls) == 1 @responses.activate def test_filters_stream(config, mock_filter_response): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Filters(**args) + stream = find_stream("filters", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 1 assert len(responses.calls) == 1 @@ -186,24 +126,37 @@ def test_groups_stream(config, groups_response): json=groups_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Groups(**args) + stream = find_stream("groups", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 4 assert len(responses.calls) == 1 @responses.activate def test_issues_fields_stream(config, mock_fields_response): + stream = find_stream("issue_fields", config) + records = list(read_full_refresh(stream)) + + assert len(records) == 6 + assert len(responses.calls) == 1 + + +@responses.activate +def test_python_issues_fields_ids_by_name(config, mock_fields_response): authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} + args = {"authenticator": authenticator, "domain": config["domain"], "projects": config["projects"]} stream = IssueFields(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] - assert len(records) == 5 - assert len(responses.calls) == 1 + expected_ids_by_name = { + "Development": ["PrIssueId"], + "Status Category Changed": ["statuscategorychangedate"], + "Issue Type": ["issuetype"], + "Parent": ["parent"], + "Issue Type2": ["issuetype2"], + "Issue Type3": ["issuetype3"] + } + assert expected_ids_by_name == stream.field_ids_by_name() @responses.activate @@ -214,11 +167,9 @@ def test_issues_field_configurations_stream(config, issues_field_configurations_ json=issues_field_configurations_response, ) - authenticator = 
SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueFieldConfigurations(**args) + stream = find_stream("issue_field_configurations", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 1 assert len(responses.calls) == 1 @@ -227,15 +178,13 @@ def test_issues_field_configurations_stream(config, issues_field_configurations_ def test_issues_link_types_stream(config, issues_link_types_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/issueLinkType?maxResults=50", + f"https://{config['domain']}/rest/api/3/issueLinkType", json=issues_link_types_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueLinkTypes(**args) + stream = find_stream("issue_link_types", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 3 assert len(responses.calls) == 1 @@ -244,15 +193,13 @@ def test_issues_link_types_stream(config, issues_link_types_response): def test_issues_navigator_settings_stream(config, issues_navigator_settings_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/settings/columns?maxResults=50", + f"https://{config['domain']}/rest/api/3/settings/columns", json=issues_navigator_settings_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueNavigatorSettings(**args) + stream = find_stream("issue_navigator_settings", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 3 assert len(responses.calls) == 1 @@ -265,11 +212,9 @@ def test_issue_notification_schemas_stream(config, issue_notification_schemas_re json=issue_notification_schemas_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueNotificationSchemes(**args) + stream = find_stream("issue_notification_schemes", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 2 assert len(responses.calls) == 1 @@ -282,11 +227,9 @@ def test_issue_properties_stream(config, issue_properties_response): json=issue_properties_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssuePriorities(**args) + stream = find_stream("issue_priorities", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 3 assert len(responses.calls) == 1 @@ -299,11 +242,9 @@ def test_issue_resolutions_stream(config, issue_resolutions_response): json=issue_resolutions_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": 
config.get("projects", [])} - stream = IssueResolutions(**args) + stream = find_stream("issue_resolutions", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 3 assert len(responses.calls) == 1 @@ -312,15 +253,13 @@ def test_issue_resolutions_stream(config, issue_resolutions_response): def test_issue_security_schemes_stream(config, issue_security_schemes_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/issuesecurityschemes?maxResults=50", + f"https://{config['domain']}/rest/api/3/issuesecurityschemes", json=issue_security_schemes_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueSecuritySchemes(**args) + stream = find_stream("issue_security_schemes", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 2 assert len(responses.calls) == 1 @@ -333,11 +272,9 @@ def test_issue_type_schemes_stream(config, issue_type_schemes_response): json=issue_type_schemes_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueTypeSchemes(**args) + stream = find_stream("issue_type_schemes", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 3 assert len(responses.calls) == 1 @@ -346,15 +283,13 @@ def test_issue_type_schemes_stream(config, issue_type_schemes_response): def test_jira_settings_stream(config, jira_settings_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/application-properties?maxResults=50", + f"https://{config['domain']}/rest/api/3/application-properties", json=jira_settings_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = JiraSettings(**args) + stream = find_stream("jira_settings", config) + records = list(read_full_refresh(stream)) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] assert len(records) == 2 assert len(responses.calls) == 1 @@ -363,77 +298,63 @@ def test_jira_settings_stream(config, jira_settings_response): def test_board_issues_stream(config, mock_board_response, board_issues_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/agile/1.0/board/1/issue?maxResults=50&fields=key&fields=created&fields=updated", + f"https://{config['domain']}/rest/agile/1.0/board/1/issue?maxResults=50&fields=key&fields=created&fields=updated&jql=updated+%3E%3D+%272021%2F01%2F01+00%3A00%27", json=board_issues_response, ) responses.add( responses.GET, - f"https://{config['domain']}/rest/agile/1.0/board/2/issue?maxResults=50&fields=key&fields=created&fields=updated", + f"https://{config['domain']}/rest/agile/1.0/board/2/issue?maxResults=50&fields=key&fields=created&fields=updated&jql=updated+%3E%3D+%272021%2F01%2F01+00%3A00%27", json={"errorMessages": ["This board has no columns with a mapped status."], "errors": {}}, status=500, ) responses.add( responses.GET, - 
f"https://{config['domain']}/rest/agile/1.0/board/3/issue?maxResults=50&fields=key&fields=created&fields=updated", + f"https://{config['domain']}/rest/agile/1.0/board/3/issue?maxResults=50&fields=key&fields=created&fields=updated&jql=updated+%3E%3D+%272021%2F01%2F01+00%3A00%27", json={}, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = BoardIssues(**args) + stream = find_stream("board_issues", config) records = list(read_full_refresh(stream)) + assert len(records) == 1 assert len(responses.calls) == 4 -def test_stream_updated_state(config): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = BoardIssues(**args) - - current_stream_state = {"22": {"updated": "2023-10-01T00:00:00Z"}} - latest_record = {"boardId": 22, "updated": "2023-09-01T00:00:00Z"} - - assert {"22": {"updated": "2023-10-01T00:00:00Z"}} == stream.get_updated_state(current_stream_state=current_stream_state, latest_record=latest_record) - - @responses.activate def test_filter_sharing_stream(config, mock_filter_response, filter_sharing_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/filter/1/permission?maxResults=50", + f"https://{config['domain']}/rest/api/3/filter/1/permission", json=filter_sharing_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = FilterSharing(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("filter_sharing", config) + records = list(read_full_refresh(stream)) + assert len(records) == 1 assert len(responses.calls) == 2 @responses.activate def test_projects_stream(config, mock_projects_responses): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Projects(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("projects", config) + records = list(read_full_refresh(stream)) + assert len(records) == 1 + assert len(responses.calls) == 1 @responses.activate def test_projects_avatars_stream(config, mock_projects_responses, projects_avatars_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/project/Project1/avatars?maxResults=50", + f"https://{config['domain']}/rest/api/3/project/1/avatars", json=projects_avatars_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ProjectAvatars(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("project_avatars", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 2 @@ -442,24 +363,22 @@ def test_projects_avatars_stream(config, mock_projects_responses, projects_avata def test_projects_categories_stream(config, projects_categories_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/projectCategory?maxResults=50", + f"https://{config['domain']}/rest/api/3/projectCategory", 
json=projects_categories_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ProjectCategories(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("project_categories", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 1 @responses.activate def test_screens_stream(config, mock_screen_response): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Screens(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("screens", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 1 @@ -468,29 +387,27 @@ def test_screens_stream(config, mock_screen_response): def test_screen_tabs_stream(config, mock_screen_response, screen_tabs_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/screens/1/tabs?maxResults=50", + f"https://{config['domain']}/rest/api/3/screens/1/tabs", json=screen_tabs_response, ) responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/screens/2/tabs?maxResults=50", + f"https://{config['domain']}/rest/api/3/screens/2/tabs", json={}, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ScreenTabs(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] - assert len(records) == 3 + stream = find_stream("screen_tabs", config) + records = list(read_full_refresh(stream)) + + assert len(records) == 2 assert len(responses.calls) == 3 @responses.activate def test_sprints_stream(config, mock_board_response, mock_sprints_response): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Sprints(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("sprints", config) + records = list(read_full_refresh(stream)) + assert len(records) == 3 assert len(responses.calls) == 4 @@ -502,39 +419,43 @@ def test_board_does_not_support_sprints(config, mock_board_response, sprints_res f"https://{config['domain']}/rest/agile/1.0/board/1/sprint?maxResults=50", json=sprints_response, ) + responses.add( + responses.GET, + f"https://{config['domain']}/rest/agile/1.0/board/2/sprint?maxResults=50", + json={"errorMessages": ["The board does not support sprints"], "errors": {}}, + status=400 + ) responses.add( responses.GET, f"https://{config['domain']}/rest/agile/1.0/board/3/sprint?maxResults=50", json=sprints_response, ) - url = f"https://{config['domain']}/rest/agile/1.0/board/2/sprint?maxResults=50" - error = {"errorMessages": ["The board does not support sprints"], "errors": {}} - responses.add(responses.GET, url, json=error, status=400) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Sprints(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = 
find_stream("sprints", config) + records = list(read_full_refresh(stream)) assert len(records) == 2 - assert ( - "The board does not support sprints. The board does not have a sprint board. if it's a team-managed one, " - "does it have sprints enabled under project settings? If it's a company-managed one," - " check that it has at least one Scrum board associated with it." - ) in caplog.text + # No matter what the error_message value is, it is displayed as 'Ignoring response for failed request with error message None' + # Feature request is added to fix this problem. + # assert ( + # "The board does not support sprints. The board does not have a sprint board. if it's a team-managed one, " + # "does it have sprints enabled under project settings? If it's a company-managed one," + # " check that it has at least one Scrum board associated with it." + # ) in caplog.text + + assert "Ignoring response for failed request" in caplog.text @responses.activate def test_sprint_issues_stream(config, mock_board_response, mock_fields_response, mock_sprints_response, sprints_issues_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/agile/1.0/sprint/2/issue?maxResults=50&fields=key&fields=status&fields=created&fields=updated", + f"https://{config['domain']}/rest/agile/1.0/sprint/2/issue?maxResults=50&fields=key&fields=status&fields=created&fields=updated&jql=updated+%3E%3D+%272021%2F01%2F01+00%3A00%27", json=sprints_issues_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = SprintIssues(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("sprint_issues", config) + records = list(read_full_refresh(stream)) + assert len(records) == 3 assert len(responses.calls) == 8 @@ -543,24 +464,22 @@ def test_sprint_issues_stream(config, mock_board_response, mock_fields_response, def test_time_tracking_stream(config, time_tracking_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/configuration/timetracking/list?maxResults=50", + f"https://{config['domain']}/rest/api/3/configuration/timetracking/list", json=time_tracking_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = TimeTracking(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("time_tracking", config) + records = list(read_full_refresh(stream)) + assert len(records) == 1 assert len(responses.calls) == 1 @responses.activate def test_users_stream(config, mock_users_response): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Users(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("users", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 1 @@ -569,19 +488,18 @@ def test_users_stream(config, mock_users_response): def test_users_groups_detailed_stream(config, mock_users_response, users_groups_detailed_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/user?maxResults=50&accountId=1&expand=groups%2CapplicationRoles", + 
f"https://{config['domain']}/rest/api/3/user?accountId=1&expand=groups%2CapplicationRoles", json=users_groups_detailed_response, ) responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/user?maxResults=50&accountId=2&expand=groups%2CapplicationRoles", + f"https://{config['domain']}/rest/api/3/user?accountId=2&expand=groups%2CapplicationRoles", json=users_groups_detailed_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = UsersGroupsDetailed(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("users_groups_detailed", config) + records = list(read_full_refresh(stream)) + assert len(records) == 4 assert len(responses.calls) == 3 @@ -594,10 +512,9 @@ def test_workflows_stream(config, workflows_response): json=workflows_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Workflows(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("workflows", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 1 @@ -610,10 +527,9 @@ def test_workflow_schemas_stream(config, workflow_schemas_response): json=workflow_schemas_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = WorkflowSchemes(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("workflow_schemes", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 1 @@ -622,14 +538,13 @@ def test_workflow_schemas_stream(config, workflow_schemas_response): def test_workflow_statuses_stream(config, workflow_statuses_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/status?maxResults=50", + f"https://{config['domain']}/rest/api/3/status", json=workflow_statuses_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = WorkflowStatuses(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("workflow_statuses", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 1 @@ -638,14 +553,13 @@ def test_workflow_statuses_stream(config, workflow_statuses_response): def test_workflow_status_categories_stream(config, workflow_status_categories_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/statuscategory?maxResults=50", + f"https://{config['domain']}/rest/api/3/statuscategory", json=workflow_status_categories_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = WorkflowStatusCategories(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.incremental)] + stream = find_stream("workflow_status_categories", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 
 assert len(responses.calls) == 1


@@ -654,38 +568,65 @@ def test_workflow_status_categories_stream(config, workflow_status_categories_re
 def test_avatars_stream(config, avatars_response):
     responses.add(
         responses.GET,
-        f"https://{config['domain']}/rest/api/3/avatar/issuetype/system?maxResults=50",
+        f"https://{config['domain']}/rest/api/3/avatar/issuetype/system",
         json=avatars_response,
     )
+    responses.add(
+        responses.GET,
+        f"https://{config['domain']}/rest/api/3/avatar/project/system",
+        json={},
+    )
+    responses.add(
+        responses.GET,
+        f"https://{config['domain']}/rest/api/3/avatar/user/system",
+        json={},
+    )
+
+    stream = find_stream("avatars", config)
+    records = list(read_full_refresh(stream))

-    authenticator = SourceJira().get_authenticator(config=config)
-    args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])}
-    stream = Avatars(**args)
-    records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"avatar_type": "issuetype"})]
     assert len(records) == 2
-    assert len(responses.calls) == 1
+    assert len(responses.calls) == 3


 @responses.activate
 def test_avatars_stream_should_retry(config, caplog):
-    url = f"https://{config['domain']}/rest/api/3/avatar/issuetype/system?maxResults=50"
-    responses.add(method=responses.GET, url=url, json={"errorMessages": ["The error message"], "errors": {}}, status=400)
+    for avatar_type in ["issuetype", "project", "user"]:
+        responses.add(
+            responses.GET,
+            f"https://{config['domain']}/rest/api/3/avatar/{avatar_type}/system",
+            json={"errorMessages": ["The error message"], "errors": {}},
+            status=400
+        )
+
+    stream = find_stream("avatars", config)
+    records = list(read_full_refresh(stream))

-    authenticator = SourceJira().get_authenticator(config=config)
-    args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])}
-    stream = Avatars(**args)
-    records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"avatar_type": "issuetype"})]
     assert len(records) == 0
+    assert "Ignoring response for failed request" in caplog.text

-    assert "The error message" in caplog.text
+
+@responses.activate
+def test_declarative_issues_stream(config, mock_projects_responses_additional_project, mock_issues_responses_with_date_filter, caplog):
+    stream = find_stream("issues", {**config, "projects": config["projects"] + ["Project3"]})
+    records = list(read_full_refresh(stream))
+    assert len(records) == 1
+
+    # check that only None values were filtered out of the 'fields' object
+    assert "empty_field" not in records[0]["fields"]
+    assert "non_empty_field" in records[0]["fields"]
+
+    assert len(responses.calls) == 3
+    # error_message = "Stream `issues`. An error occurred, details: [\"The value '3' does not exist for the field 'project'.\"]. Skipping for now. The user doesn't have permission to the project. Please grant the user to the project."
+ assert "Ignoring response for failed request with error message None" in caplog.messages @responses.activate -def test_issues_stream(config, mock_projects_responses_additional_project, mock_issues_responses, caplog): +def test_python_issues_stream(config, mock_projects_responses_additional_project, mock_issues_responses_with_date_filter, caplog): authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", []) + ["Project3"]} + args = {"authenticator": authenticator, "domain": config["domain"], "projects": config["projects"] + ["Project3"]} stream = Issues(**args) - records = list(read_full_refresh(stream)) + records = list(read_incremental(stream, {"updated": "2021-01-01T00:00:00Z"})) assert len(records) == 1 # check if only None values was filtered out from 'fields' field @@ -696,99 +637,123 @@ def test_issues_stream(config, mock_projects_responses_additional_project, mock_ error_message = "Stream `issues`. An error occurred, details: [\"The value '3' does not exist for the field 'project'.\"]. Skipping for now. The user doesn't have permission to the project. Please grant the user to the project." assert error_message in caplog.messages + +def test_python_issues_stream_updated_state(config): + authenticator = SourceJira().get_authenticator(config=config) + args = {"authenticator": authenticator, "domain": config["domain"], "projects": config["projects"]} + stream = Issues(**args) + + updated_state = stream.get_updated_state( + current_stream_state={"updated": "2021-01-01T00:00:00Z"}, + latest_record={"updated": "2021-01-02T00:00:00Z"} + ) + assert updated_state == {"updated": "2021-01-02T00:00:00Z"} + + +@pytest.mark.parametrize( + "dev_field, has_pull_request", + ( + ("PullRequestOverallDetails{openCount=1, mergedCount=1, declinedCount=1}", True), + ("PullRequestOverallDetails{openCount=0, mergedCount=0, declinedCount=0}", False), + ("pullrequest={dataType=pullrequest, state=thestate, stateCount=1}", True), + ("pullrequest={dataType=pullrequest, state=thestate, stateCount=0}", False), + ("{}", False), + ) +) +def test_python_pull_requests_stream_has_pull_request(config, dev_field, has_pull_request): + authenticator = SourceJira().get_authenticator(config=config) + args = {"authenticator": authenticator, "domain": config["domain"], "projects": config["projects"]} + issues_stream = Issues(**args) + issue_fields_stream = IssueFields(**args) + incremental_args = { + **args, + "start_date": pendulum.parse(config["start_date"]), + "lookback_window_minutes": 0, + } + pull_requests_stream = PullRequests(issues_stream=issues_stream, issue_fields_stream=issue_fields_stream, **incremental_args) + + assert has_pull_request == pull_requests_stream.has_pull_requests(dev_field) + + +@responses.activate +def test_python_pull_requests_stream_has_pull_request(config, mock_fields_response, mock_projects_responses_additional_project, mock_issues_responses_with_date_filter): + authenticator = SourceJira().get_authenticator(config=config) + args = {"authenticator": authenticator, "domain": config["domain"], "projects": config["projects"]} + issues_stream = Issues(**args) + issue_fields_stream = IssueFields(**args) + incremental_args = { + **args, + "start_date": pendulum.parse(config["start_date"]), + "lookback_window_minutes": 0, + } + stream = PullRequests(issues_stream=issues_stream, issue_fields_stream=issue_fields_stream, **incremental_args) + + responses.add( + responses.GET, + 
f"https://{config['domain']}/rest/dev-status/1.0/issue/detail?maxResults=50&issueId=10627&applicationType=GitHub&dataType=branch", + json={"detail": [{"id": "1", "name": "Source Jira: pull request"}]}, + ) + + records = list(read_incremental(stream, {"updated": "2021-01-01T00:00:00Z"})) + + assert len(records) == 1 + assert len(responses.calls) == 4 + + @pytest.mark.parametrize( "start_date, lookback_window, stream_state, expected_query", [ (pendulum.parse("2023-09-09T00:00:00Z"), 0, None, None), (None, 10, {"updated": "2023-12-14T09:47:00"}, "updated >= '2023/12/14 09:37'"), - (None, 0, {"updated": "2023-12-14T09:47:00"}, "updated >= '2023/12/14 09:47'") - ] + (None, 0, {"updated": "2023-12-14T09:47:00"}, "updated >= '2023/12/14 09:47'"), + ], ) def test_issues_stream_jql_compare_date(config, start_date, lookback_window, stream_state, expected_query, caplog): authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", []) + ["Project3"], - "lookback_window_minutes": pendulum.duration(minutes=lookback_window)} + args = { + "authenticator": authenticator, + "domain": config["domain"], + "projects": config.get("projects", []) + ["Project3"], + "lookback_window_minutes": pendulum.duration(minutes=lookback_window), + } stream = Issues(**args) assert stream.jql_compare_date(stream_state) == expected_query - @responses.activate -def test_issue_comments_stream(config, mock_projects_responses, mock_issues_responses, issue_comments_response): +def test_python_issue_comments_stream(config, mock_projects_responses, mock_issues_responses_with_date_filter, issue_comments_response): responses.add( responses.GET, f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/comment?maxResults=50", json=issue_comments_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueComments(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("issue_comments", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 3 @responses.activate def test_issue_custom_field_contexts_stream(config, mock_fields_response, mock_issue_custom_field_contexts_response): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueCustomFieldContexts(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] - assert len(records) == 2 - assert len(responses.calls) == 4 - - -@responses.activate -def test_issue_property_keys_stream(config, issue_property_keys_response): - responses.add( - responses.GET, - f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/properties?maxResults=50", - json=issue_property_keys_response, - ) + stream = find_stream("issue_custom_field_contexts", config) + records = list(read_full_refresh(stream)) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssuePropertyKeys(**args) - records = [ - r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"issue_key": "TESTKEY13-1", "key": "TESTKEY13-1"}) - ] assert len(records) == 2 - assert 
len(responses.calls) == 1 - - -@responses.activate -def test_issue_property_keys_stream_not_found_skip(config, issue_property_keys_response): - config["domain"] = "test_skip_properties" - responses.add( - responses.GET, - f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/properties?maxResults=50", - json={"errorMessages": ["Issue does not exist or you do not have permission to see it."], "errors": {}}, - status=404, - ) - - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssuePropertyKeys(**args) - records = [ - r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"issue_key": "TESTKEY13-1", "key": "TESTKEY13-1"}) - ] - assert len(records) == 0 - assert len(responses.calls) == 1 + assert len(responses.calls) == 4 @responses.activate def test_project_permissions_stream(config, mock_projects_responses, project_permissions_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/project/Project1/securitylevel?maxResults=50", + f"https://{config['domain']}/rest/api/3/project/Project1/securitylevel", json=project_permissions_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ProjectPermissionSchemes(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"key": "Project1"})] + stream = find_stream("project_permission_schemes", config) + records = list(read_full_refresh(stream)) expected_records = [ { "description": "Only the reporter and internal staff can see this issue.", @@ -806,15 +771,15 @@ def test_project_permissions_stream(config, mock_projects_responses, project_per }, ] assert len(records) == 2 - assert records == expected_records + for rec, exp in zip(records, expected_records): + assert dict(rec) == exp, f"Failed at {rec} vs {exp}" @responses.activate def test_project_email_stream(config, mock_projects_responses, mock_project_emails): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ProjectEmail(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("project_email", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 2 @@ -827,10 +792,9 @@ def test_project_components_stream(config, mock_projects_responses, project_comp json=project_components_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ProjectComponents(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"key": "Project1"})] + stream = find_stream("project_components", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 2 @@ -839,14 +803,13 @@ def test_project_components_stream(config, mock_projects_responses, project_comp def test_permissions_stream(config, permissions_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/permissions?maxResults=50", + f"https://{config['domain']}/rest/api/3/permissions", json=permissions_response, 
) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Permissions(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("permissions", config) + records = list(read_full_refresh(stream)) + assert len(records) == 1 assert len(responses.calls) == 1 @@ -864,69 +827,68 @@ def test_labels_stream(config, labels_response): json={}, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = Labels(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("labels", config) + records = list(read_full_refresh(stream)) + assert len(records) == 2 assert len(responses.calls) == 2 @responses.activate -def test_issue_worklogs_stream(config, mock_projects_responses, mock_issues_responses, issue_worklogs_response): +def test_issue_worklogs_stream(config, mock_projects_responses, mock_issues_responses_with_date_filter, issue_worklogs_response): responses.add( responses.GET, f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/worklog?maxResults=50", json=issue_worklogs_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueWorklogs(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] + stream = find_stream("issue_worklogs", config) + records = list(read_full_refresh(stream)) + assert len(records) == 1 assert len(responses.calls) == 3 @responses.activate -def test_issue_watchers_stream(config, mock_projects_responses, mock_issues_responses, mock_issue_watchers_responses): - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueWatchers(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh)] +def test_issue_watchers_stream(config, mock_projects_responses, mock_issues_responses_with_date_filter, issue_votes_response): + responses.add( + responses.GET, + f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/watchers", + json=issue_votes_response, + ) + + stream = find_stream("issue_watchers", config) + records = list(read_full_refresh(stream)) + assert len(records) == 1 assert len(responses.calls) == 3 @responses.activate -def test_issue_votes_stream(config, mock_projects_responses, mock_issues_responses, issue_votes_response): +def test_issue_votes_stream_slice(config, mock_projects_responses, mock_issues_responses_with_date_filter, issue_votes_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/votes?maxResults=50", + f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/votes", json=issue_votes_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueVotes(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"key": "Project1"})] + stream = find_stream("issue_votes", config) + records = list(read_full_refresh(stream)) assert len(records) == 1 assert 
len(responses.calls) == 3 @responses.activate -def test_issue_remote_links_stream(config, mock_projects_responses, mock_issues_responses, issue_remote_links_response): +def test_issue_remote_links_stream_(config, mock_projects_responses, mock_issues_responses_with_date_filter, issue_remote_links_response): responses.add( responses.GET, - f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/remotelink?maxResults=50", + f"https://{config['domain']}/rest/api/3/issue/TESTKEY13-1/remotelink", json=issue_remote_links_response, ) - authenticator = SourceJira().get_authenticator(config=config) - args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = IssueRemoteLinks(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"key": "Project1"})] + stream = find_stream("issue_remote_links", config) + records = list(read_full_refresh(stream)) assert len(records) == 2 assert len(responses.calls) == 3 @@ -942,8 +904,8 @@ def test_project_versions_stream(config, mock_projects_responses, projects_versi authenticator = SourceJira().get_authenticator(config=config) args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", [])} - stream = ProjectVersions(**args) - records = [r for r in stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice={"key": "Project1"})] + stream = find_stream("project_versions", config) + records = list(read_full_refresh(stream)) assert len(records) == 2 assert len(responses.calls) == 2 @@ -953,36 +915,41 @@ def test_project_versions_stream(config, mock_projects_responses, projects_versi "stream, expected_records_number, expected_calls_number, log_message", [ ( - Issues, + "issues", 2, 4, - "Stream `issues`. An error occurred, details: [\"The value '3' does not " - "exist for the field 'project'.\"]. Skipping for now. The user doesn't have " - "permission to the project. Please grant the user to the project.", + "Ignoring response for failed request with error message None" + # "Stream `issues`. An error occurred, details: [\"The value '3' does not " + # "exist for the field 'project'.\"]. Skipping for now. The user doesn't have " + # "permission to the project. Please grant the user to the project.", ), ( - IssueCustomFieldContexts, + "issue_custom_field_contexts", 2, 4, - "Stream `issue_custom_field_contexts`. An error occurred, details: ['Not found issue custom field context for issue fields issuetype2']. Skipping for now. ", + "Ignoring response for failed request with error message None" + # "Stream `issue_custom_field_contexts`. An error occurred, details: ['Not found issue custom field context for issue fields issuetype2']. Skipping for now. ", ), ( - IssueCustomFieldOptions, + "issue_custom_field_options", 1, 6, - "Stream `issue_custom_field_options`. An error occurred, details: ['Not found issue custom field options for issue fields issuetype3']. Skipping for now. ", + "Ignoring response for failed request with error message None" + # "Stream `issue_custom_field_options`. An error occurred, details: ['Not found issue custom field options for issue fields issuetype3']. Skipping for now. ", ), ( - IssueWatchers, + "issue_watchers", 1, 6, - "Stream `issue_watchers`. An error occurred, details: ['Not found watchers for issue TESTKEY13-2']. Skipping for now. ", + "Ignoring response for failed request with error message None" + # "Stream `issue_watchers`. 
An error occurred, details: ['Not found watchers for issue TESTKEY13-2']. Skipping for now. ",
         ),
         (
-            ProjectEmail,
+            "project_email",
             4,
             4,
-            "Stream `project_email`. An error occurred, details: ['No access to emails for project 3']. Skipping for now. ",
+            "Ignoring response for failed request with error message None"
+            # "Stream `project_email`. An error occurred, details: ['No access to emails for project 3']. Skipping for now. ",
         ),
     ],
 )
@@ -990,7 +957,7 @@ def test_project_versions_stream(config, mock_projects_responses, projects_versi
 def test_skip_slice(
     config,
     mock_projects_responses_additional_project,
-    mock_issues_responses,
+    mock_issues_responses_with_date_filter,
     mock_project_emails,
     mock_issue_watchers_responses,
     mock_issue_custom_field_contexts_response_error,
@@ -1002,9 +969,8 @@ def test_skip_slice(
     expected_calls_number,
     log_message,
 ):
-    authenticator = SourceJira().get_authenticator(config=config)
-    args = {"authenticator": authenticator, "domain": config["domain"], "projects": config.get("projects", []) + ["Project3", "Project4"]}
-    stream = stream(**args)
+    config["projects"] = config.get("projects", []) + ["Project3", "Project4"]
+    stream = find_stream(stream, config)
     records = list(read_full_refresh(stream))

     assert len(records) == expected_records_number
diff --git a/docs/integrations/sources/jira-migrations.md b/docs/integrations/sources/jira-migrations.md
index aba47c32ba5b..dc24276047a3 100644
--- a/docs/integrations/sources/jira-migrations.md
+++ b/docs/integrations/sources/jira-migrations.md
@@ -1,5 +1,31 @@
 # Jira Migration Guide

+## Upgrading to 2.0.0
+
+We're continuously striving to enhance the quality and reliability of our connectors at Airbyte. As part of our commitment to delivering exceptional service, we are transitioning source-jira from the Python Connector Development Kit (CDK) to our innovative low-code framework. This is part of a strategic move to streamline many processes across connectors, bolstering maintainability and freeing us to focus more of our efforts on improving the performance and features of our evolving platform and growing catalog. However, due to differences between the Python and low-code CDKs, this migration constitutes a breaking change.
+
+To gracefully handle these changes for your existing connections, we highly recommend resetting your data before resuming your data syncs with the new version.
+
+1. Select **Connections** in the main navbar.
+   1. Select the connection(s) affected by the update.
+2. Select the **Replication** tab.
+   1. Select **Refresh source schema**.
+      ```note
+      Any detected schema changes will be listed for your review.
+      ```
+   2. Select **OK**.
+3. Select **Save changes** at the bottom of the page.
+   1. Ensure the **Reset affected streams** option is checked.
+      ```note
+      Depending on your destination type, you may not be prompted to reset your data.
+      ```
+4. Select **Save connection**.
+   ```note
+   This will reset the data in your destination and initiate a fresh sync.
+   ```
+
+For more information on resetting your data in Airbyte, see [this page](https://docs.airbyte.com/operator-guides/reset).
+
 ## Upgrading to 1.0.0

 Note: this change is only breaking if you are using the `Boards Issues` stream in Incremental Sync mode.
diff --git a/docs/integrations/sources/jira.md b/docs/integrations/sources/jira.md index e840629bf793..65ad82b71c7a 100644 --- a/docs/integrations/sources/jira.md +++ b/docs/integrations/sources/jira.md @@ -123,7 +123,8 @@ The Jira connector should not run into Jira API limitations under normal usage. ## CHANGELOG | Version | Date | Pull Request | Subject | -| :------ | :--------- | :--------------------------------------------------------- | :--------------------------------------------------------------------------------------------------------------------------------------------------------------- | +|:--------|:-----------|:-----------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 2.0.0 | 2024-04-20 | [37374](https://github.com/airbytehq/airbyte/pull/37374) | Migrate to low-code and fix `Project Avatars` stream | | 1.2.2 | 2024-04-19 | [36646](https://github.com/airbytehq/airbyte/pull/36646) | Updating to 0.80.0 CDK | | 1.2.1 | 2024-04-12 | [36646](https://github.com/airbytehq/airbyte/pull/36646) | schema descriptions | | 1.2.0 | 2024-03-19 | [36267](https://github.com/airbytehq/airbyte/pull/36267) | Pin airbyte-cdk version to `^0` |
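A note on the test helpers used throughout the rewritten unit tests: `find_stream` (imported from `conftest`) and `read_full_refresh`/`read_incremental` (from `source_jira.utils`) are referenced everywhere above but are not shown in this diff. The following is a minimal sketch of what such helpers might look like against the Airbyte CDK — an assumption for illustration, not the PR's actual code:

```python
# Illustrative sketch only: the real helpers live in the connector's conftest.py and
# source_jira/utils.py and may differ in detail.
from airbyte_cdk.models import SyncMode

from source_jira.source import SourceJira


def find_stream(stream_name, config):
    # Build every stream the source exposes (declarative streams come from
    # manifest.yaml) and return the one with a matching name.
    for stream in SourceJira().streams(config=config):
        if stream.name == stream_name:
            return stream
    raise ValueError(f"Stream {stream_name} not found")


def read_full_refresh(stream):
    # Drain the stream slice by slice, the way a full-refresh sync would.
    for stream_slice in stream.stream_slices(sync_mode=SyncMode.full_refresh):
        yield from stream.read_records(sync_mode=SyncMode.full_refresh, stream_slice=stream_slice)
```

With helpers of this shape, `list(read_full_refresh(find_stream("boards", config)))` is all a test needs to exercise a declarative stream end to end against the mocked HTTP layer.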